mirror of https://github.com/vladmandic/human
guard against invalid input images
parent
22c809f17d
commit
ee9d0d50b2
|
@ -1,6 +1,6 @@
|
|||
# @vladmandic/human
|
||||
|
||||
Version: **1.1.0**
|
||||
Version: **1.1.1**
|
||||
Description: **Human: AI-powered 3D Face Detection, Face Embedding & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition**
|
||||
|
||||
Author: **Vladimir Mandic <mandic00@live.com>**
|
||||
|
@ -9,8 +9,9 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
|
|||
|
||||
## Changelog
|
||||
|
||||
### **HEAD -> main** 2021/03/11 mandic00@live.com
|
||||
### **1.1.1** 2021/03/12 mandic00@live.com
|
||||
|
||||
- switched face embedding to mobileface
|
||||
|
||||
### **1.0.4** 2021/03/11 mandic00@live.com
|
||||
|
||||
|
|
13
TODO.md
13
TODO.md
|
@ -2,19 +2,6 @@
|
|||
|
||||
- Strong typing
|
||||
- Automated testing
|
||||
- Guard against corrupt input
|
||||
- Improve face embedding
|
||||
- Dynamic sample processing
|
||||
- Explore EfficientPose
|
||||
<https://github.com/daniegr/EfficientPose>
|
||||
<https://github.com/PINTO0309/PINTO_model_zoo/tree/main/084_EfficientPose>
|
||||
|
||||
## WiP: Embedding
|
||||
|
||||
- Implement offsetRaw
|
||||
|
||||
full with and without rotation
|
||||
full with and without embedding
|
||||
full with and without mesh
|
||||
embedding with and without mesh
|
||||
boxRaw and meshRaw with and without mesh
|
||||
|
|
|
@ -3,9 +3,8 @@ import Human from '../src/human';
|
|||
import Menu from './menu.js';
|
||||
import GLBench from './gl-bench.js';
|
||||
|
||||
const userConfig = { backend: 'webgl' }; // add any user configuration overrides
|
||||
// const userConfig = { backend: 'webgl' }; // add any user configuration overrides
|
||||
|
||||
/*
|
||||
const userConfig = {
|
||||
backend: 'wasm',
|
||||
async: false,
|
||||
|
@ -16,7 +15,6 @@ const userConfig = {
|
|||
gesture: { enabled: false },
|
||||
body: { enabled: false, modelPath: '../models/blazepose.json' },
|
||||
};
|
||||
*/
|
||||
|
||||
const human = new Human(userConfig);
|
||||
|
||||
|
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -25,7 +25,7 @@
|
|||
"@vladmandic/pilogger": "^0.2.14",
|
||||
"chokidar": "^3.5.1",
|
||||
"dayjs": "^1.10.4",
|
||||
"esbuild": "=0.9.0",
|
||||
"esbuild": "^0.9.2",
|
||||
"eslint": "^7.21.0",
|
||||
"eslint-config-airbnb-base": "^14.2.1",
|
||||
"eslint-plugin-import": "^2.22.1",
|
||||
|
@ -1243,9 +1243,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/esbuild": {
|
||||
"version": "0.9.0",
|
||||
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.9.0.tgz",
|
||||
"integrity": "sha512-IqYFO7ZKHf0y4uJpJfGqInmSRn8jMPMbyI1W0Y2PSjSjJcVP538tC8TleJAS4Y8QeqwajqBTwFKayWVzYlMIgg==",
|
||||
"version": "0.9.2",
|
||||
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.9.2.tgz",
|
||||
"integrity": "sha512-xE3oOILjnmN8PSjkG3lT9NBbd1DbxNqolJ5qNyrLhDWsFef3yTp/KTQz1C/x7BYFKbtrr9foYtKA6KA1zuNAUQ==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"bin": {
|
||||
|
@ -4841,9 +4841,9 @@
|
|||
}
|
||||
},
|
||||
"esbuild": {
|
||||
"version": "0.9.0",
|
||||
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.9.0.tgz",
|
||||
"integrity": "sha512-IqYFO7ZKHf0y4uJpJfGqInmSRn8jMPMbyI1W0Y2PSjSjJcVP538tC8TleJAS4Y8QeqwajqBTwFKayWVzYlMIgg==",
|
||||
"version": "0.9.2",
|
||||
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.9.2.tgz",
|
||||
"integrity": "sha512-xE3oOILjnmN8PSjkG3lT9NBbd1DbxNqolJ5qNyrLhDWsFef3yTp/KTQz1C/x7BYFKbtrr9foYtKA6KA1zuNAUQ==",
|
||||
"dev": true
|
||||
},
|
||||
"escalade": {
|
||||
|
|
12
package.json
12
package.json
|
@ -21,11 +21,11 @@
|
|||
"url": "git+https://github.com/vladmandic/human.git"
|
||||
},
|
||||
"scripts": {
|
||||
"start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",
|
||||
"dev": "npm install && node server/serve.js",
|
||||
"build": "rimraf dist/* && rimraf types/* && node server/build.js && node server/changelog.js",
|
||||
"lint": "eslint src demo server",
|
||||
"test": "eslint src demo server"
|
||||
"start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation demo/node.js",
|
||||
"dev": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught server/serve.js",
|
||||
"build": "rimraf dist/* && rimraf types/* && node --trace-warnings --unhandled-rejections=strict --trace-uncaught server/build.js && node server/changelog.js",
|
||||
"lint": "eslint src server demo",
|
||||
"test": "npm run lint && npm run start"
|
||||
},
|
||||
"keywords": [
|
||||
"tensorflowjs",
|
||||
|
@ -60,7 +60,7 @@
|
|||
"@vladmandic/pilogger": "^0.2.14",
|
||||
"chokidar": "^3.5.1",
|
||||
"dayjs": "^1.10.4",
|
||||
"esbuild": "=0.9.0",
|
||||
"esbuild": "^0.9.2",
|
||||
"eslint": "^7.21.0",
|
||||
"eslint-config-airbnb-base": "^14.2.1",
|
||||
"eslint-plugin-import": "^2.22.1",
|
||||
|
|
|
@ -39,9 +39,8 @@ export class MediaPipeFaceMesh {
|
|||
Math.max(0, prediction.box.startPoint[0] / input.shape[2]),
|
||||
Math.max(0, prediction.box.startPoint[1] / input.shape[1]),
|
||||
Math.min(input.shape[2], (prediction.box.endPoint[0]) - prediction.box.startPoint[0]) / input.shape[2],
|
||||
Math.min(input.shape[1], (prediction.box.endPoint[1]) - prediction.box.startPoint[1]) / input.shape[2],
|
||||
Math.min(input.shape[1], (prediction.box.endPoint[1]) - prediction.box.startPoint[1]) / input.shape[1],
|
||||
] : [];
|
||||
/*
|
||||
let offsetRaw = <any>[];
|
||||
if (meshRaw.length > 0 && boxRaw.length > 0) {
|
||||
const dimX = meshRaw.map((pt) => pt[0]);
|
||||
|
@ -53,7 +52,6 @@ export class MediaPipeFaceMesh {
|
|||
Math.min(1, 1 - Math.max(...dimX) + boxRaw[3]), // distance of detected face border to box right edge
|
||||
];
|
||||
}
|
||||
*/
|
||||
results.push({
|
||||
confidence: prediction.faceConfidence || prediction.boxConfidence || 0,
|
||||
boxConfidence: prediction.boxConfidence,
|
||||
|
@ -62,7 +60,7 @@ export class MediaPipeFaceMesh {
|
|||
mesh,
|
||||
boxRaw,
|
||||
meshRaw,
|
||||
// offsetRaw,
|
||||
offsetRaw,
|
||||
annotations,
|
||||
image: prediction.image ? tf.clone(prediction.image) : null,
|
||||
});
|
||||
|
|
21
src/draw.ts
21
src/draw.ts
|
@ -20,8 +20,8 @@ export const options = {
|
|||
bufferedOutput: false,
|
||||
};
|
||||
|
||||
function point(ctx, x, y) {
|
||||
ctx.fillStyle = options.color;
|
||||
function point(ctx, x, y, z = null) {
|
||||
ctx.fillStyle = options.useDepth && z ? `rgba(${127.5 + (2 * (z || 0))}, ${127.5 - (2 * (z || 0))}, 255, 0.3)` : options.color;
|
||||
ctx.beginPath();
|
||||
ctx.arc(x, y, options.pointSize, 0, 2 * Math.PI);
|
||||
ctx.fill();
|
||||
|
@ -53,7 +53,11 @@ function lines(ctx, points: number[] = []) {
|
|||
if (points === undefined || points.length === 0) return;
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(points[0][0], points[0][1]);
|
||||
for (const pt of points) ctx.lineTo(pt[0], parseInt(pt[1]));
|
||||
for (const pt of points) {
|
||||
ctx.strokeStyle = options.useDepth && pt[2] ? `rgba(${127.5 + (2 * pt[2])}, ${127.5 - (2 * pt[2])}, 255, 0.3)` : options.color;
|
||||
ctx.fillStyle = options.useDepth && pt[2] ? `rgba(${127.5 + (2 * pt[2])}, ${127.5 - (2 * pt[2])}, 255, 0.3)` : options.color;
|
||||
ctx.lineTo(pt[0], parseInt(pt[1]));
|
||||
}
|
||||
ctx.stroke();
|
||||
if (options.fillPolygons) {
|
||||
ctx.closePath();
|
||||
|
@ -118,6 +122,7 @@ export async function face(inCanvas, result) {
|
|||
ctx.fillStyle = options.color;
|
||||
if (options.drawBoxes) {
|
||||
rect(ctx, f.box[0], f.box[1], f.box[2], f.box[3]);
|
||||
// rect(ctx, inCanvas.width * f.boxRaw[0], inCanvas.height * f.boxRaw[1], inCanvas.width * f.boxRaw[2], inCanvas.height * f.boxRaw[3]);
|
||||
}
|
||||
// silly hack since fillText does not support new line
|
||||
const labels:string[] = [];
|
||||
|
@ -146,21 +151,17 @@ export async function face(inCanvas, result) {
|
|||
ctx.lineWidth = 1;
|
||||
if (f.mesh) {
|
||||
if (options.drawPoints) {
|
||||
for (const pt of f.mesh) {
|
||||
ctx.fillStyle = options.useDepth ? `rgba(${127.5 + (2 * pt[2])}, ${127.5 - (2 * pt[2])}, 255, 0.5)` : options.color;
|
||||
point(ctx, pt[0], pt[1]);
|
||||
}
|
||||
for (const pt of f.mesh) point(ctx, pt[0], pt[1], pt[2]);
|
||||
// for (const pt of f.meshRaw) point(ctx, pt[0] * inCanvas.offsetWidth, pt[1] * inCanvas.offsetHeight, pt[2]);
|
||||
}
|
||||
if (options.drawPolygons) {
|
||||
ctx.lineWidth = 1;
|
||||
for (let i = 0; i < triangulation.length / 3; i++) {
|
||||
const points = [
|
||||
triangulation[i * 3 + 0],
|
||||
triangulation[i * 3 + 1],
|
||||
triangulation[i * 3 + 2],
|
||||
].map((index) => f.mesh[index]);
|
||||
ctx.strokeStyle = options.useDepth ? `rgba(${127.5 + (2 * points[0][2])}, ${127.5 - (2 * points[0][2])}, 255, 0.3)` : options.color;
|
||||
ctx.fillStyle = options.useDepth ? `rgba(${127.5 + (2 * points[0][2])}, ${127.5 - (2 * points[0][2])}, 255, 0.3)` : options.color;
|
||||
ctx.lineWidth = 1;
|
||||
lines(ctx, points);
|
||||
}
|
||||
// iris: array[center, left, top, right, bottom]
|
||||
|
|
|
@ -37,6 +37,7 @@ export function enhance(input) {
|
|||
const box = input.offsetRaw
|
||||
? [input.offsetRaw] // crop based on face mesh borders
|
||||
: [[0.05, 0.15, 0.85, 0.85]]; // fixed crop for top, left, bottom, right
|
||||
console.log('BOX', box[0]);
|
||||
const tensor = input.image || input.tensor;
|
||||
const crop = tensor.shape.length === 3
|
||||
? tf.image.cropAndResize(tensor.expandDims(0), box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]) // add batch if missing
|
||||
|
|
|
@ -4,6 +4,7 @@ import { log } from './log';
|
|||
import * as tf from '../dist/tfjs.esm.js';
|
||||
import * as fxImage from './imagefx';
|
||||
|
||||
const maxSize = 2048;
|
||||
// internal temp canvases
|
||||
let inCanvas = null;
|
||||
let outCanvas = null;
|
||||
|
@ -22,6 +23,14 @@ export function process(input, config): { tensor, canvas } {
|
|||
const originalHeight = input.naturalHeight || input.videoHeight || input.height || (input.shape && (input.shape[2] > 0));
|
||||
let targetWidth = originalWidth;
|
||||
let targetHeight = originalHeight;
|
||||
if (targetWidth > maxSize) {
|
||||
targetWidth = maxSize;
|
||||
targetHeight = targetWidth * originalHeight / originalWidth;
|
||||
}
|
||||
if (targetHeight > maxSize) {
|
||||
targetHeight = maxSize;
|
||||
targetWidth = targetHeight * originalWidth / originalHeight;
|
||||
}
|
||||
if (config.filter.width > 0) targetWidth = config.filter.width;
|
||||
else if (config.filter.height > 0) targetWidth = originalWidth * (config.filter.height / originalHeight);
|
||||
if (config.filter.height > 0) targetHeight = config.filter.height;
|
||||
|
|
Loading…
Reference in New Issue