initial version of facerecognition demo

pull/233/head
Vladimir Mandic 2021-11-09 10:39:23 -05:00
parent b16bb2e8ac
commit 148391fe1c
21 changed files with 515 additions and 55 deletions


@@ -143,13 +143,21 @@
     "typedoc": "typedoc"
   },
   {
-    "name": "demo/browser",
+    "name": "demo/typescript",
     "platform": "browser",
     "format": "esm",
     "input": "demo/typescript/index.ts",
     "output": "demo/typescript/index.js",
     "sourcemap": true,
-    "minify": false,
+    "external": ["*/human.esm.js"]
+  },
+  {
+    "name": "demo/facerecognition",
+    "platform": "browser",
+    "format": "esm",
+    "input": "demo/facerecognition/index.ts",
+    "output": "demo/facerecognition/index.js",
+    "sourcemap": true,
     "external": ["*/human.esm.js"]
   }
 ]
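Note: these build targets are consumed by the project's build wrapper around esbuild, so the new demo/facerecognition entry amounts to bundling demo/facerecognition/index.ts into a browser ESM file while treating the human library itself as external. A rough TypeScript sketch of the equivalent direct esbuild call; the one-to-one option mapping (and bundle: true) is an assumption, since the repo actually routes this through @vladmandic/build:

import { build } from 'esbuild';

// hypothetical direct invocation equivalent to the "demo/facerecognition" target above
await build({
  entryPoints: ['demo/facerecognition/index.ts'],
  outfile: 'demo/facerecognition/index.js',
  platform: 'browser',
  format: 'esm',
  bundle: true, // assumed; the config entry does not list it explicitly
  sourcemap: true,
  external: ['*/human.esm.js'], // keep the human library itself out of the demo bundle
});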


@@ -29,6 +29,7 @@
   "assets",
   "demo/helpers/*.js",
   "demo/typescript/*.js",
+  "demo/facerecognition/*.js",
   "dist",
   "media",
   "models",
@@ -60,6 +61,7 @@
   "no-bitwise": "off",
   "no-case-declarations":"off",
   "no-continue": "off",
+  "no-else-return": "off",
   "no-lonely-if": "off",
   "no-loop-func": "off",
   "no-mixed-operators": "off",


@@ -11,9 +11,8 @@
 ### **HEAD -> main** 2021/11/08 mandic00@live.com

-### **origin/main** 2021/11/08 mandic00@live.com
+- add type defs when working with relative path imports
+- disable humangl backend if webgl 1.0 is detected

 ### **release: 2.5.1** 2021/11/08 mandic00@live.com


@@ -0,0 +1,30 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Human: Face Recognition</title>
<meta name="viewport" content="width=device-width" id="viewport">
<meta name="keywords" content="Human">
<meta name="application-name" content="Human">
<meta name="description" content="Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction, Emotion Prediction & Gesture Recognition; Author: Vladimir Mandic <https://github.com/vladmandic>">
<meta name="msapplication-tooltip" content="Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction, Emotion Prediction & Gesture Recognition; Author: Vladimir Mandic <https://github.com/vladmandic>">
<meta name="theme-color" content="#000000">
<link rel="manifest" href="../manifest.webmanifest">
<link rel="shortcut icon" href="../../favicon.ico" type="image/x-icon">
<link rel="apple-touch-icon" href="../../assets/icon.png">
<script src="./index.js" type="module"></script>
<style>
@font-face { font-family: 'Lato'; font-display: swap; font-style: normal; font-weight: 100; src: local('Lato'), url('../../assets/lato-light.woff2') }
html { font-family: 'Lato', 'Segoe UI'; font-size: 16px; font-variant: small-caps; }
body { margin: 0; background: black; color: white; overflow-x: hidden; width: 100vw; height: 100vh; }
body::-webkit-scrollbar { display: none; }
</style>
</head>
<body>
<canvas id="canvas" style="margin: 0 auto; width: 100%"></canvas>
<video id="video" playsinline style="display: none"></video>
<pre id="fps" style="position: absolute; top: 12px; right: 20px; background-color: grey; padding: 8px; box-shadow: 2px 2px black"></pre>
<pre id="log" style="padding: 8px"></pre>
<div id="status" style="position: absolute; bottom: 0; width: 100%; padding: 8px; font-size: 0.8rem;"></div>
</body>
</html>


@@ -0,0 +1,166 @@
/*
Human
homepage: <https://github.com/vladmandic/human>
author: <https://github.com/vladmandic>
*/
// demo/facerecognition/index.ts
import { Human } from "../../dist/human.esm.js";
var humanConfig = {
modelBasePath: "../../models",
filter: { equalization: true },
face: {
enabled: true,
detector: { rotation: true, return: true },
description: { enabled: true },
iris: { enabled: true },
emotion: { enabled: false },
antispoof: { enabled: true }
},
body: { enabled: false },
hand: { enabled: false },
object: { enabled: false },
gesture: { enabled: true }
};
var options = {
minConfidence: 0.6,
minSize: 224,
maxTime: 1e4
};
var human = new Human(humanConfig);
human.env["perfadd"] = false;
human.draw.options.font = 'small-caps 18px "Lato"';
human.draw.options.lineHeight = 20;
var dom = {
video: document.getElementById("video"),
canvas: document.getElementById("canvas"),
log: document.getElementById("log"),
fps: document.getElementById("fps"),
status: document.getElementById("status")
};
var timestamp = { detect: 0, draw: 0 };
var fps = { detect: 0, draw: 0 };
var startTime = 0;
var log = (...msg) => {
dom.log.innerText += msg.join(" ") + "\n";
console.log(...msg);
};
var printFPS = (msg) => dom.fps.innerText = msg;
var printStatus = (msg) => dom.status.innerText = "status: " + JSON.stringify(msg).replace(/"|{|}/g, "").replace(/,/g, " | ");
async function webCam() {
printFPS("starting webcam...");
const cameraOptions = { audio: false, video: { facingMode: "user", resizeMode: "none", width: { ideal: document.body.clientWidth } } };
const stream = await navigator.mediaDevices.getUserMedia(cameraOptions);
const ready = new Promise((resolve) => {
dom.video.onloadeddata = () => resolve(true);
});
dom.video.srcObject = stream;
dom.video.play();
await ready;
dom.canvas.width = dom.video.videoWidth;
dom.canvas.height = dom.video.videoHeight;
const track = stream.getVideoTracks()[0];
const capabilities = track.getCapabilities ? track.getCapabilities() : "";
const settings = track.getSettings ? track.getSettings() : "";
const constraints = track.getConstraints ? track.getConstraints() : "";
log("video:", dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });
dom.canvas.onclick = () => {
if (dom.video.paused)
dom.video.play();
else
dom.video.pause();
};
}
async function detectionLoop() {
if (!dom.video.paused) {
await human.detect(dom.video);
const now = human.now();
fps.detect = 1e3 / (now - timestamp.detect);
timestamp.detect = now;
requestAnimationFrame(detectionLoop);
}
}
var ok = {
faceCount: false,
faceConfidence: false,
facingCenter: false,
eyesOpen: false,
blinkDetected: false,
faceSize: false,
antispoofCheck: false,
livenessCheck: false,
elapsedMs: 0
};
var allOk = () => ok.faceCount && ok.faceSize && ok.blinkDetected && ok.facingCenter && ok.faceConfidence && ok.antispoofCheck;
async function validationLoop() {
const interpolated = await human.next(human.result);
await human.draw.canvas(dom.video, dom.canvas);
await human.draw.all(dom.canvas, interpolated);
const now = human.now();
fps.draw = 1e3 / (now - timestamp.draw);
timestamp.draw = now;
printFPS(`fps: ${fps.detect.toFixed(1).padStart(5, " ")} detect | ${fps.draw.toFixed(1).padStart(5, " ")} draw`);
const gestures = Object.values(human.result.gesture).map((gesture) => gesture.gesture);
ok.faceCount = human.result.face.length === 1;
ok.eyesOpen = ok.eyesOpen || !(gestures.includes("blink left eye") || gestures.includes("blink right eye"));
ok.blinkDetected = ok.eyesOpen && ok.blinkDetected || gestures.includes("blink left eye") || gestures.includes("blink right eye");
ok.facingCenter = gestures.includes("facing center") && gestures.includes("looking center");
ok.faceConfidence = (human.result.face[0].boxScore || 0) > options.minConfidence && (human.result.face[0].faceScore || 0) > options.minConfidence && (human.result.face[0].genderScore || 0) > options.minConfidence;
ok.antispoofCheck = (human.result.face[0].real || 0) > options.minConfidence;
ok.faceSize = human.result.face[0].box[2] >= options.minSize && human.result.face[0].box[3] >= options.minSize;
printStatus(ok);
if (allOk()) {
dom.video.pause();
return human.result.face;
} else {
human.tf.dispose(human.result.face[0].tensor);
}
if (ok.elapsedMs > options.maxTime) {
dom.video.pause();
return human.result.face;
} else {
ok.elapsedMs = Math.trunc(human.now() - startTime);
return new Promise((resolve) => {
setTimeout(async () => {
const res = await validationLoop();
if (res)
resolve(human.result.face);
}, 30);
});
}
}
async function detectFace(face) {
dom.canvas.width = face.tensor.shape[2];
dom.canvas.height = face.tensor.shape[1];
dom.canvas.style.width = "";
human.tf.browser.toPixels(face.tensor, dom.canvas);
human.tf.dispose(face.tensor);
}
async function main() {
log("human version:", human.version, "| tfjs version:", human.tf.version_core);
printFPS("loading...");
await human.load();
printFPS("initializing...");
await human.warmup();
await webCam();
await detectionLoop();
startTime = human.now();
const face = await validationLoop();
if (!allOk())
log("did not find valid input", face);
else {
log("found valid face", face);
await detectFace(face[0]);
}
dom.fps.style.display = "none";
}
window.onload = main;
/**
* Human demo for browsers
* @default Human Library
* @summary <https://github.com/vladmandic/human>
* @author <https://github.com/vladmandic>
* @copyright <https://github.com/vladmandic>
* @license MIT
*/
//# sourceMappingURL=index.js.map
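The eyesOpen/blinkDetected pair in the validation loop above implements a small latch: eyesOpen goes true on the first frame with no blink gesture, and blinkDetected is raised by any blink but only persists across frames once the eyes were first seen open, so a session cannot pass the check by starting mid-blink. A standalone TypeScript sketch of that exact logic (the helper packaging is hypothetical, not part of the demo):

type BlinkState = { eyesOpen: boolean, blinkDetected: boolean };

function updateBlink(gestures: string[], s: BlinkState): BlinkState {
  const blinking = gestures.includes('blink left eye') || gestures.includes('blink right eye');
  const eyesOpen = s.eyesOpen || !blinking; // latches true on first frame without a blink gesture
  const blinkDetected = (eyesOpen && s.blinkDetected) || blinking; // raised by a blink, kept only after eyes were seen open
  return { eyesOpen, blinkDetected };
}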

File diff suppressed because one or more lines are too long


@@ -0,0 +1,175 @@
/**
* Human demo for browsers
* @default Human Library
* @summary <https://github.com/vladmandic/human>
* @author <https://github.com/vladmandic>
* @copyright <https://github.com/vladmandic>
* @license MIT
*/
import { Human } from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human
const humanConfig = { // user configuration for human, used to fine-tune behavior
modelBasePath: '../../models',
filter: { equalization: true }, // let's run with histogram equalizer
face: {
enabled: true,
detector: { rotation: true, return: true }, // return tensor is not really needed except to draw detected face
description: { enabled: true },
iris: { enabled: true }, // needed to determine gaze direction
emotion: { enabled: false }, // not needed
antispoof: { enabled: true }, // enable optional antispoof as well
},
body: { enabled: false },
hand: { enabled: false },
object: { enabled: false },
gesture: { enabled: true },
};
const options = {
minConfidence: 0.6, // overall face confidence for box, face, gender, real
minSize: 224, // min input to face descriptor model before degradation
maxTime: 10000, // max time before giving up
};
const human = new Human(humanConfig); // create instance of human with overrides from user configuration
human.env['perfadd'] = false; // is performance data showing instant or total values
human.draw.options.font = 'small-caps 18px "Lato"'; // set font used to draw labels when using draw methods
human.draw.options.lineHeight = 20;
const dom = { // grab instances of dom objects so we don't have to look them up later
video: document.getElementById('video') as HTMLVideoElement,
canvas: document.getElementById('canvas') as HTMLCanvasElement,
log: document.getElementById('log') as HTMLPreElement,
fps: document.getElementById('fps') as HTMLPreElement,
status: document.getElementById('status') as HTMLPreElement,
};
const timestamp = { detect: 0, draw: 0 }; // holds information used to calculate performance and possible memory leaks
const fps = { detect: 0, draw: 0 }; // holds calculated fps information for both detect and screen refresh
let startTime = 0;
const log = (...msg) => { // helper method to output messages
dom.log.innerText += msg.join(' ') + '\n';
// eslint-disable-next-line no-console
console.log(...msg);
};
const printFPS = (msg) => dom.fps.innerText = msg; // print fps element
const printStatus = (msg) => dom.status.innerText = 'status: ' + JSON.stringify(msg).replace(/"|{|}/g, '').replace(/,/g, ' | '); // print status element
async function webCam() { // initialize webcam
printFPS('starting webcam...');
// @ts-ignore resizeMode is not yet defined in tslib
const cameraOptions: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } };
const stream: MediaStream = await navigator.mediaDevices.getUserMedia(cameraOptions);
const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });
dom.video.srcObject = stream;
dom.video.play();
await ready;
dom.canvas.width = dom.video.videoWidth;
dom.canvas.height = dom.video.videoHeight;
const track: MediaStreamTrack = stream.getVideoTracks()[0];
const capabilities: MediaTrackCapabilities | string = track.getCapabilities ? track.getCapabilities() : '';
const settings: MediaTrackSettings | string = track.getSettings ? track.getSettings() : '';
const constraints: MediaTrackConstraints | string = track.getConstraints ? track.getConstraints() : '';
log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });
dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click
if (dom.video.paused) dom.video.play();
else dom.video.pause();
};
}
async function detectionLoop() { // main detection loop
if (!dom.video.paused) {
await human.detect(dom.video); // actual detection; we're not capturing the output in a local variable as it can also be reached via human.result
const now = human.now();
fps.detect = 1000 / (now - timestamp.detect);
timestamp.detect = now;
requestAnimationFrame(detectionLoop); // start new frame immediately
}
}
const ok = { // must meet all rules
faceCount: false,
faceConfidence: false,
facingCenter: false,
eyesOpen: false,
blinkDetected: false,
faceSize: false,
antispoofCheck: false,
livenessCheck: false,
elapsedMs: 0,
};
const allOk = () => ok.faceCount && ok.faceSize && ok.blinkDetected && ok.facingCenter && ok.faceConfidence && ok.antispoofCheck;
async function validationLoop(): Promise<typeof human.result.face> { // main screen refresh loop
const interpolated = await human.next(human.result); // smoothen result using last-known results
await human.draw.canvas(dom.video, dom.canvas); // draw canvas to screen
await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.
const now = human.now();
fps.draw = 1000 / (now - timestamp.draw);
timestamp.draw = now;
printFPS(`fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect | ${fps.draw.toFixed(1).padStart(5, ' ')} draw`); // write status
const gestures: string[] = Object.values(human.result.gesture).map((gesture) => gesture.gesture); // flatten all gestures
ok.faceCount = human.result.face.length === 1; // must be exactly one detected face
ok.eyesOpen = ok.eyesOpen || !(gestures.includes('blink left eye') || gestures.includes('blink right eye')); // blink validation is only ok once both eyes are open
ok.blinkDetected = ok.eyesOpen && ok.blinkDetected || gestures.includes('blink left eye') || gestures.includes('blink right eye'); // need to detect blink only once
ok.facingCenter = gestures.includes('facing center') && gestures.includes('looking center'); // must face camera and look at camera
ok.faceConfidence = (human.result.face[0].boxScore || 0) > options.minConfidence && (human.result.face[0].faceScore || 0) > options.minConfidence && (human.result.face[0].genderScore || 0) > options.minConfidence;
ok.antispoofCheck = (human.result.face[0].real || 0) > options.minConfidence;
ok.faceSize = human.result.face[0].box[2] >= options.minSize && human.result.face[0].box[3] >= options.minSize;
printStatus(ok);
if (allOk()) { // all criteria met
dom.video.pause();
return human.result.face;
} else {
human.tf.dispose(human.result.face[0].tensor); // results are not ok, so lets dispose tensor
}
if (ok.elapsedMs > options.maxTime) { // give up
dom.video.pause();
return human.result.face;
} else { // run again
ok.elapsedMs = Math.trunc(human.now() - startTime);
return new Promise((resolve) => {
setTimeout(async () => {
const res = await validationLoop(); // run validation loop until conditions are met
if (res) resolve(human.result.face); // recursive promise resolve
}, 30); // used to slow down refresh from max refresh rate to a target of ~30 fps
});
}
}
async function detectFace(face) {
// draw face and dispose face tensor immediately afterwards
dom.canvas.width = face.tensor.shape[2];
dom.canvas.height = face.tensor.shape[1];
dom.canvas.style.width = '';
human.tf.browser.toPixels(face.tensor, dom.canvas);
human.tf.dispose(face.tensor);
// run detection using human.match and use face.embedding as input descriptor
// tbd
}
async function main() { // main entry point
log('human version:', human.version, '| tfjs version:', human.tf.version_core);
printFPS('loading...');
await human.load(); // preload all models
printFPS('initializing...');
await human.warmup(); // warmup function to initialize backend for future faster detection
await webCam(); // start webcam
await detectionLoop(); // start detection loop
startTime = human.now();
const face = await validationLoop(); // start validation loop
if (!allOk()) log('did not find valid input', face);
else {
log('found valid face', face);
await detectFace(face[0]);
}
dom.fps.style.display = 'none';
}
window.onload = main;
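The "tbd" note in detectFace is the actual recognition step: compare the validated face.embedding against previously enrolled descriptors. A hedged sketch of how that could extend the file above, using human.similarity for pairwise descriptor comparison (human also ships a batched human.match helper used by its separate facematch demo); the enrolled store and the reuse of options.minConfidence as a match threshold are assumptions, not part of this commit:

const enrolled: number[][] = []; // hypothetical store of descriptors saved during enrollment

async function recognizeFace(face) {
  if (!face.embedding) return;
  if (enrolled.length === 0) { // nothing enrolled yet, so treat this capture as enrollment
    enrolled.push(face.embedding);
    log('enrolled new face descriptor');
    return;
  }
  let best = { index: -1, similarity: 0 };
  for (let i = 0; i < enrolled.length; i++) {
    const similarity = human.similarity(face.embedding, enrolled[i]); // 0..1 descriptor similarity
    if (similarity > best.similarity) best = { index: i, similarity };
  }
  if (best.similarity > options.minConfidence) log('face recognized:', best.index, '| similarity:', best.similarity.toFixed(2));
  else log('face not recognized | best similarity:', best.similarity.toFixed(2));
}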


@@ -26,6 +26,5 @@
   <pre id="status" style="position: absolute; top: 12px; right: 20px; background-color: grey; padding: 8px; box-shadow: 2px 2px black"></pre>
   <pre id="log" style="padding: 8px"></pre>
   <div id="performance" style="position: absolute; bottom: 0; width: 100%; padding: 8px; font-size: 0.8rem;"></div>
-  <canvas id="test" style="position: absolute; bottom: 0; right: 0; width: 30%"></canvas>
 </body>
</html>


@@ -1003,7 +1003,7 @@ async function process2(input, config3, getTensor = true) {
   if (input["isDisposedInternal"]) {
     throw new Error("input tensor is disposed");
   } else if (!input.shape || input.shape.length !== 4 || input.shape[0] !== 1 || input.shape[3] !== 3) {
-    throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input["shape"]}`);
+    throw new Error("input tensor shape must be [1, height, width, 3] and instead was" + (input["shape"] ? input["shape"].toString() : "unknown"));
   } else {
     return { tensor: tfjs_esm_exports.clone(input), canvas: config3.filter.return ? outCanvas : null };
   }
@@ -5654,7 +5654,6 @@ async function predict6(input, config3) {
   if (!((_e = config3.face.mesh) == null ? void 0 : _e.enabled)) {
     face5.box = getClampedBox(box4, input);
     face5.boxRaw = getRawBox(box4, input);
-    face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
     face5.score = face5.boxScore;
     face5.mesh = box4.landmarks.map((pt) => [
       (box4.startPoint[0] + box4.endPoint[0]) / 2 + (box4.endPoint[0] + box4.startPoint[0]) * pt[0] / size(),
@@ -5682,7 +5681,7 @@ async function predict6(input, config3) {
   face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
   for (const key of Object.keys(meshAnnotations))
     face5.annotations[key] = meshAnnotations[key].map((index2) => face5.mesh[index2]);
-  box4 = squarifyBox(enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact));
+  box4 = squarifyBox({ ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact), confidence: box4.confidence });
   face5.box = getClampedBox(box4, input);
   face5.boxRaw = getRawBox(box4, input);
   face5.score = face5.faceScore;

File diff suppressed because one or more lines are too long

dist/human.esm.js vendored

@@ -71328,7 +71328,7 @@ async function process2(input2, config3, getTensor2 = true) {
   if (input2["isDisposedInternal"]) {
     throw new Error("input tensor is disposed");
   } else if (!input2.shape || input2.shape.length !== 4 || input2.shape[0] !== 1 || input2.shape[3] !== 3) {
-    throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input2["shape"]}`);
+    throw new Error("input tensor shape must be [1, height, width, 3] and instead was" + (input2["shape"] ? input2["shape"].toString() : "unknown"));
   } else {
     return { tensor: clone(input2), canvas: config3.filter.return ? outCanvas : null };
   }
@@ -75979,7 +75979,6 @@ async function predict6(input2, config3) {
   if (!((_e = config3.face.mesh) == null ? void 0 : _e.enabled)) {
     face5.box = getClampedBox(box4, input2);
     face5.boxRaw = getRawBox(box4, input2);
-    face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
     face5.score = face5.boxScore;
     face5.mesh = box4.landmarks.map((pt) => [
       (box4.startPoint[0] + box4.endPoint[0]) / 2 + (box4.endPoint[0] + box4.startPoint[0]) * pt[0] / size(),
@@ -76007,7 +76006,7 @@ async function predict6(input2, config3) {
   face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input2.shape[2] || 0), pt[1] / (input2.shape[1] || 0), (pt[2] || 0) / inputSize5]);
   for (const key of Object.keys(meshAnnotations))
     face5.annotations[key] = meshAnnotations[key].map((index2) => face5.mesh[index2]);
-  box4 = squarifyBox(enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact));
+  box4 = squarifyBox({ ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact), confidence: box4.confidence });
   face5.box = getClampedBox(box4, input2);
   face5.boxRaw = getRawBox(box4, input2);
   face5.score = face5.faceScore;

File diff suppressed because one or more lines are too long

dist/human.js vendored

File diff suppressed because one or more lines are too long


@@ -1013,7 +1013,7 @@ async function process2(input, config3, getTensor = true) {
   if (input["isDisposedInternal"]) {
     throw new Error("input tensor is disposed");
   } else if (!input.shape || input.shape.length !== 4 || input.shape[0] !== 1 || input.shape[3] !== 3) {
-    throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input["shape"]}`);
+    throw new Error("input tensor shape must be [1, height, width, 3] and instead was" + (input["shape"] ? input["shape"].toString() : "unknown"));
   } else {
     return { tensor: tf2.clone(input), canvas: config3.filter.return ? outCanvas : null };
   }
@@ -5692,7 +5692,6 @@ async function predict6(input, config3) {
   if (!((_e = config3.face.mesh) == null ? void 0 : _e.enabled)) {
     face5.box = getClampedBox(box4, input);
     face5.boxRaw = getRawBox(box4, input);
-    face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
     face5.score = face5.boxScore;
     face5.mesh = box4.landmarks.map((pt) => [
       (box4.startPoint[0] + box4.endPoint[0]) / 2 + (box4.endPoint[0] + box4.startPoint[0]) * pt[0] / size(),
@@ -5720,7 +5719,7 @@ async function predict6(input, config3) {
   face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
   for (const key of Object.keys(meshAnnotations))
     face5.annotations[key] = meshAnnotations[key].map((index2) => face5.mesh[index2]);
-  box4 = squarifyBox(enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact));
+  box4 = squarifyBox({ ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact), confidence: box4.confidence });
   face5.box = getClampedBox(box4, input);
   face5.boxRaw = getRawBox(box4, input);
   face5.score = face5.faceScore;


@@ -1014,7 +1014,7 @@ async function process2(input, config3, getTensor = true) {
   if (input["isDisposedInternal"]) {
     throw new Error("input tensor is disposed");
   } else if (!input.shape || input.shape.length !== 4 || input.shape[0] !== 1 || input.shape[3] !== 3) {
-    throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input["shape"]}`);
+    throw new Error("input tensor shape must be [1, height, width, 3] and instead was" + (input["shape"] ? input["shape"].toString() : "unknown"));
   } else {
     return { tensor: tf2.clone(input), canvas: config3.filter.return ? outCanvas : null };
   }
@@ -5693,7 +5693,6 @@ async function predict6(input, config3) {
   if (!((_e = config3.face.mesh) == null ? void 0 : _e.enabled)) {
     face5.box = getClampedBox(box4, input);
     face5.boxRaw = getRawBox(box4, input);
-    face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
     face5.score = face5.boxScore;
     face5.mesh = box4.landmarks.map((pt) => [
       (box4.startPoint[0] + box4.endPoint[0]) / 2 + (box4.endPoint[0] + box4.startPoint[0]) * pt[0] / size(),
@@ -5721,7 +5720,7 @@ async function predict6(input, config3) {
   face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
   for (const key of Object.keys(meshAnnotations))
     face5.annotations[key] = meshAnnotations[key].map((index2) => face5.mesh[index2]);
-  box4 = squarifyBox(enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact));
+  box4 = squarifyBox({ ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact), confidence: box4.confidence });
   face5.box = getClampedBox(box4, input);
   face5.boxRaw = getRawBox(box4, input);
   face5.score = face5.faceScore;

dist/human.node.js vendored

@@ -1013,7 +1013,7 @@ async function process2(input, config3, getTensor = true) {
   if (input["isDisposedInternal"]) {
     throw new Error("input tensor is disposed");
   } else if (!input.shape || input.shape.length !== 4 || input.shape[0] !== 1 || input.shape[3] !== 3) {
-    throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input["shape"]}`);
+    throw new Error("input tensor shape must be [1, height, width, 3] and instead was" + (input["shape"] ? input["shape"].toString() : "unknown"));
   } else {
     return { tensor: tf2.clone(input), canvas: config3.filter.return ? outCanvas : null };
   }
@@ -5692,7 +5692,6 @@ async function predict6(input, config3) {
   if (!((_e = config3.face.mesh) == null ? void 0 : _e.enabled)) {
     face5.box = getClampedBox(box4, input);
     face5.boxRaw = getRawBox(box4, input);
-    face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
     face5.score = face5.boxScore;
     face5.mesh = box4.landmarks.map((pt) => [
       (box4.startPoint[0] + box4.endPoint[0]) / 2 + (box4.endPoint[0] + box4.startPoint[0]) * pt[0] / size(),
@@ -5720,7 +5719,7 @@ async function predict6(input, config3) {
   face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
   for (const key of Object.keys(meshAnnotations))
     face5.annotations[key] = meshAnnotations[key].map((index2) => face5.mesh[index2]);
-  box4 = squarifyBox(enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact));
+  box4 = squarifyBox({ ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact), confidence: box4.confidence });
   face5.box = getClampedBox(box4, input);
   face5.boxRaw = getRawBox(box4, input);
   face5.score = face5.faceScore;

models/liveness.bin Normal file

Binary file not shown.

models/liveness.json Normal file

@@ -0,0 +1,79 @@
{
"format": "graph-model",
"generatedBy": "https://github.com/leokwu/livenessnet",
"convertedBy": "https://github.com/vladmandic",
"signature":
{
"inputs":
{
"conv2d_1_input": {"name":"conv2d_1_input:0","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"-1"},{"size":"32"},{"size":"32"},{"size":"3"}]}}
},
"outputs":
{
"activation_6": {"name":"Identity:0","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"-1"},{"size":"2"}]}}
}
},
"modelTopology":
{
"node":
[
{"name":"StatefulPartitionedCall/sequential_1/conv2d_1/Conv2D/ReadVariableOp","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"3"},{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/conv2d_1/BiasAdd/ReadVariableOp","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/conv2d_2/Conv2D/ReadVariableOp","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"16"},{"size":"16"}]}}}}},
{"name":"StatefulPartitionedCall/sequential_1/conv2d_2/BiasAdd/ReadVariableOp","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/conv2d_3/Conv2D/ReadVariableOp","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"16"},{"size":"32"}]}}}}},
{"name":"StatefulPartitionedCall/sequential_1/conv2d_3/BiasAdd/ReadVariableOp","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/conv2d_4/Conv2D/ReadVariableOp","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"32"},{"size":"32"}]}}}}},
{"name":"StatefulPartitionedCall/sequential_1/conv2d_4/BiasAdd/ReadVariableOp","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/flatten_1/Const","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}},
{"name":"StatefulPartitionedCall/sequential_1/dense_1/MatMul/ReadVariableOp","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"2048"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/dense_1/BiasAdd/ReadVariableOp","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_5/batchnorm/mul","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_5/batchnorm/sub","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}}}},
{"name":"StatefulPartitionedCall/sequential_1/dense_2/MatMul/ReadVariableOp","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"},{"size":"2"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/dense_2/BiasAdd/ReadVariableOp","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"2"}]}}}}},
{"name":"conv2d_1_input","op":"Placeholder","attr":{"shape":{"shape":{"dim":[{"size":"-1"},{"size":"32"},{"size":"32"},{"size":"3"}]}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_2/FusedBatchNormV3/Scaled","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_2/FusedBatchNormV3/Offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_1/FusedBatchNormV3/Scaled","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_1/FusedBatchNormV3/Offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_4/FusedBatchNormV3/Scaled","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_4/FusedBatchNormV3/Offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_3/FusedBatchNormV3/Scaled","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_3/FusedBatchNormV3/Offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}}}},
{"name":"StatefulPartitionedCall/sequential_1/activation_1/Relu","op":"_FusedConv2D","input":["conv2d_1_input","StatefulPartitionedCall/sequential_1/conv2d_1/Conv2D/ReadVariableOp","StatefulPartitionedCall/sequential_1/conv2d_1/BiasAdd/ReadVariableOp"],"device":"/device:CPU:0","attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"use_cudnn_on_gpu":{"b":true},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"strides":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"padding":{"s":"U0FNRQ=="}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_1/FusedBatchNormV3/Mul","op":"Mul","input":["StatefulPartitionedCall/sequential_1/activation_1/Relu","StatefulPartitionedCall/sequential_1/batch_normalization_1/FusedBatchNormV3/Scaled"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_1/FusedBatchNormV3","op":"Add","input":["StatefulPartitionedCall/sequential_1/batch_normalization_1/FusedBatchNormV3/Mul","StatefulPartitionedCall/sequential_1/batch_normalization_1/FusedBatchNormV3/Offset"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/activation_2/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/sequential_1/batch_normalization_1/FusedBatchNormV3","StatefulPartitionedCall/sequential_1/conv2d_2/Conv2D/ReadVariableOp","StatefulPartitionedCall/sequential_1/conv2d_2/BiasAdd/ReadVariableOp"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"padding":{"s":"U0FNRQ=="},"use_cudnn_on_gpu":{"b":true}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_2/FusedBatchNormV3/Mul","op":"Mul","input":["StatefulPartitionedCall/sequential_1/activation_2/Relu","StatefulPartitionedCall/sequential_1/batch_normalization_2/FusedBatchNormV3/Scaled"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_2/FusedBatchNormV3","op":"Add","input":["StatefulPartitionedCall/sequential_1/batch_normalization_2/FusedBatchNormV3/Mul","StatefulPartitionedCall/sequential_1/batch_normalization_2/FusedBatchNormV3/Offset"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/max_pooling2d_1/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/sequential_1/batch_normalization_2/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"padding":{"s":"VkFMSUQ="},"ksize":{"list":{"i":["1","2","2","1"]}},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","2","2","1"]}}}},
{"name":"StatefulPartitionedCall/sequential_1/activation_3/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/sequential_1/max_pooling2d_1/MaxPool","StatefulPartitionedCall/sequential_1/conv2d_3/Conv2D/ReadVariableOp","StatefulPartitionedCall/sequential_1/conv2d_3/BiasAdd/ReadVariableOp"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"dilations":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_3/FusedBatchNormV3/Mul","op":"Mul","input":["StatefulPartitionedCall/sequential_1/activation_3/Relu","StatefulPartitionedCall/sequential_1/batch_normalization_3/FusedBatchNormV3/Scaled"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_3/FusedBatchNormV3","op":"Add","input":["StatefulPartitionedCall/sequential_1/batch_normalization_3/FusedBatchNormV3/Mul","StatefulPartitionedCall/sequential_1/batch_normalization_3/FusedBatchNormV3/Offset"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/activation_4/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/sequential_1/batch_normalization_3/FusedBatchNormV3","StatefulPartitionedCall/sequential_1/conv2d_4/Conv2D/ReadVariableOp","StatefulPartitionedCall/sequential_1/conv2d_4/BiasAdd/ReadVariableOp"],"device":"/device:CPU:0","attr":{"num_args":{"i":"1"},"padding":{"s":"U0FNRQ=="},"use_cudnn_on_gpu":{"b":true},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"dilations":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_4/FusedBatchNormV3/Mul","op":"Mul","input":["StatefulPartitionedCall/sequential_1/activation_4/Relu","StatefulPartitionedCall/sequential_1/batch_normalization_4/FusedBatchNormV3/Scaled"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_4/FusedBatchNormV3","op":"Add","input":["StatefulPartitionedCall/sequential_1/batch_normalization_4/FusedBatchNormV3/Mul","StatefulPartitionedCall/sequential_1/batch_normalization_4/FusedBatchNormV3/Offset"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/max_pooling2d_2/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/sequential_1/batch_normalization_4/FusedBatchNormV3"],"attr":{"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"padding":{"s":"VkFMSUQ="},"ksize":{"list":{"i":["1","2","2","1"]}},"strides":{"list":{"i":["1","2","2","1"]}}}},
{"name":"StatefulPartitionedCall/sequential_1/flatten_1/Reshape","op":"Reshape","input":["StatefulPartitionedCall/sequential_1/max_pooling2d_2/MaxPool","StatefulPartitionedCall/sequential_1/flatten_1/Const"],"attr":{"T":{"type":"DT_FLOAT"},"Tshape":{"type":"DT_INT32"}}},
{"name":"StatefulPartitionedCall/sequential_1/activation_5/Relu","op":"_FusedMatMul","input":["StatefulPartitionedCall/sequential_1/flatten_1/Reshape","StatefulPartitionedCall/sequential_1/dense_1/MatMul/ReadVariableOp","StatefulPartitionedCall/sequential_1/dense_1/BiasAdd/ReadVariableOp"],"device":"/device:CPU:0","attr":{"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"T":{"type":"DT_FLOAT"},"epsilon":{"f":0},"transpose_b":{"b":false},"num_args":{"i":"1"},"transpose_a":{"b":false}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_5/batchnorm/mul_1","op":"Mul","input":["StatefulPartitionedCall/sequential_1/activation_5/Relu","StatefulPartitionedCall/sequential_1/batch_normalization_5/batchnorm/mul"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_5/batchnorm/add_1","op":"AddV2","input":["StatefulPartitionedCall/sequential_1/batch_normalization_5/batchnorm/mul_1","StatefulPartitionedCall/sequential_1/batch_normalization_5/batchnorm/sub"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/dense_2/BiasAdd","op":"_FusedMatMul","input":["StatefulPartitionedCall/sequential_1/batch_normalization_5/batchnorm/add_1","StatefulPartitionedCall/sequential_1/dense_2/MatMul/ReadVariableOp","StatefulPartitionedCall/sequential_1/dense_2/BiasAdd/ReadVariableOp"],"device":"/device:CPU:0","attr":{"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"transpose_a":{"b":false},"epsilon":{"f":0},"transpose_b":{"b":false},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential_1/activation_6/Softmax","op":"Softmax","input":["StatefulPartitionedCall/sequential_1/dense_2/BiasAdd"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Identity","op":"Identity","input":["StatefulPartitionedCall/sequential_1/activation_6/Softmax"],"attr":{"T":{"type":"DT_FLOAT"}}}
],
"library": {},
"versions":
{
"producer": 808
}
},
"weightsManifest":
[
{
"paths": ["liveness.bin"],
"weights": [{"name":"StatefulPartitionedCall/sequential_1/conv2d_1/Conv2D/ReadVariableOp","shape":[3,3,3,16],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/conv2d_1/BiasAdd/ReadVariableOp","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/conv2d_2/Conv2D/ReadVariableOp","shape":[3,3,16,16],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/conv2d_2/BiasAdd/ReadVariableOp","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/conv2d_3/Conv2D/ReadVariableOp","shape":[3,3,16,32],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/conv2d_3/BiasAdd/ReadVariableOp","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/conv2d_4/Conv2D/ReadVariableOp","shape":[3,3,32,32],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/conv2d_4/BiasAdd/ReadVariableOp","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/flatten_1/Const","shape":[2],"dtype":"int32"},{"name":"StatefulPartitionedCall/sequential_1/dense_1/MatMul/ReadVariableOp","shape":[2048,64],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/dense_1/BiasAdd/ReadVariableOp","shape":[64],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_5/batchnorm/mul","shape":[64],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_5/batchnorm/sub","shape":[64],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/dense_2/MatMul/ReadVariableOp","shape":[64,2],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/dense_2/BiasAdd/ReadVariableOp","shape":[2],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_2/FusedBatchNormV3/Scaled","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_2/FusedBatchNormV3/Offset","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_1/FusedBatchNormV3/Scaled","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_1/FusedBatchNormV3/Offset","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_4/FusedBatchNormV3/Scaled","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_4/FusedBatchNormV3/Offset","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_3/FusedBatchNormV3/Scaled","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/sequential_1/batch_normalization_3/FusedBatchNormV3/Offset","shape":[32],"dtype":"float32"}]
}
]
}
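Per the signature block above, the converted livenessnet graph model takes conv2d_1_input of shape [-1, 32, 32, 3] and emits a two-class softmax (activation_6, shape [-1, 2]). For reference, a standalone sketch of driving it with plain tfjs; the resize, 0..1 normalization, and class ordering are assumptions, since this commit only ships the model files:

import * as tf from '@tensorflow/tfjs';

async function livenessScore(image: tf.Tensor3D): Promise<number> {
  const model = await tf.loadGraphModel('models/liveness.json'); // loads liveness.bin via weightsManifest
  const resized = tf.image.resizeBilinear(image, [32, 32]); // match conv2d_1_input [-1, 32, 32, 3]
  const input = tf.expandDims(tf.div(resized, 255), 0); // add batch dim, normalize to 0..1 (assumed)
  const output = model.execute(input) as tf.Tensor; // activation_6 softmax, shape [1, 2]
  const scores = await output.data();
  tf.dispose([resized, input, output]);
  return scores[1]; // assumed to be the probability that the face is real
}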


@@ -78,7 +78,6 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult
   if (!config.face.mesh?.enabled) { // mesh not enabled, return results from detector only
     face.box = util.getClampedBox(box, input);
     face.boxRaw = util.getRawBox(box, input);
-    face.boxScore = Math.round(100 * box.confidence || 0) / 100;
     face.score = face.boxScore;
     face.mesh = box.landmarks.map((pt) => [
       ((box.startPoint[0] + box.endPoint[0])) / 2 + ((box.endPoint[0] + box.startPoint[0]) * pt[0] / blazeface.size()),
@@ -102,7 +101,7 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult
   face.mesh = util.transformRawCoords(rawCoords, box, angle, rotationMatrix, inputSize); // get processed mesh
   face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize]);
   for (const key of Object.keys(coords.meshAnnotations)) face.annotations[key] = coords.meshAnnotations[key].map((index) => face.mesh[index]); // add annotations
-  box = util.squarifyBox(util.enlargeBox(util.calculateLandmarksBoundingBox(face.mesh), enlargeFact)); // redefine box with mesh calculated one
+  box = util.squarifyBox({ ...util.enlargeBox(util.calculateLandmarksBoundingBox(face.mesh), enlargeFact), confidence: box.confidence }); // redefine box with mesh calculated one
   face.box = util.getClampedBox(box, input); // update detected box with box around the face mesh
   face.boxRaw = util.getRawBox(box, input);
   face.score = face.faceScore;
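The two hunks are one fix: the box helpers build fresh boxes from coordinates only, so the old squarifyBox call returned a refined box without a confidence field, and the explicitly recomputed boxScore line became redundant once confidence travels with the box. Spreading the enlarged box and re-attaching box.confidence keeps the detector score through mesh refinement. A simplified TypeScript illustration of the pattern (types and helpers reduced to stand-ins for brevity):

type Box = { startPoint: [number, number], endPoint: [number, number], confidence?: number };

// stand-ins for util.enlargeBox / util.squarifyBox: both construct new boxes from
// coordinates only, which is why confidence was silently dropped before this fix
const enlarge = (b: Box): Box => ({ startPoint: b.startPoint, endPoint: b.endPoint });
const squarify = (b: Box): Box => ({ ...b });

const detected: Box = { startPoint: [10, 10], endPoint: [50, 60], confidence: 0.93 };
const before = squarify(enlarge(detected)); // before.confidence === undefined
const after = squarify({ ...enlarge(detected), confidence: detected.confidence }); // after.confidence === 0.93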


@@ -82,7 +82,7 @@ export async function process(input: Input, config: Config, getTensor: boolean =
   if ((input)['isDisposedInternal']) {
     throw new Error('input tensor is disposed');
   } else if (!(input as Tensor).shape || (input as Tensor).shape.length !== 4 || (input as Tensor).shape[0] !== 1 || (input as Tensor).shape[3] !== 3) {
-    throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input['shape']}`);
+    throw new Error('input tensor shape must be [1, height, width, 3] and instead was' + (input['shape'] ? input['shape'].toString() : 'unknown'));
   } else {
     return { tensor: tf.clone(input), canvas: (config.filter.return ? outCanvas : null) };
   }
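The motivation for this change: a template literal interpolates a missing shape as the string "undefined", while the new concatenation falls back to an explicit "unknown". Minimal illustration of the string logic only (not human code; spacing simplified):

function shapeMessage(shape?: number[]): string {
  return 'input tensor shape must be [1, height, width, 3] and instead was ' + (shape ? shape.toString() : 'unknown');
}

console.log(shapeMessage()); // ...and instead was unknown
console.log(shapeMessage([2, 224, 224, 3])); // ...and instead was 2,224,224,3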


@@ -1,25 +1,26 @@
-2021-11-08 16:36:28 INFO:  @vladmandic/human version 2.5.1
-2021-11-08 16:36:28 INFO:  User: vlado Platform: linux Arch: x64 Node: v17.0.1
-2021-11-08 16:36:28 INFO:  Application: {"name":"@vladmandic/human","version":"2.5.1"}
-2021-11-08 16:36:28 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
-2021-11-08 16:36:28 INFO:  Toolchain: {"build":"0.6.3","esbuild":"0.13.12","typescript":"4.4.4","typedoc":"0.22.8","eslint":"8.2.0"}
-2021-11-08 16:36:28 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
-2021-11-08 16:36:28 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
-2021-11-08 16:36:28 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
-2021-11-08 16:36:28 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":56,"inputBytes":523123,"outputBytes":441937}
-2021-11-08 16:36:28 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
-2021-11-08 16:36:28 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":56,"inputBytes":523131,"outputBytes":441941}
-2021-11-08 16:36:28 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
-2021-11-08 16:36:28 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":56,"inputBytes":523198,"outputBytes":442013}
-2021-11-08 16:36:28 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
-2021-11-08 16:36:28 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2326,"outputBytes":912}
-2021-11-08 16:36:28 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":56,"inputBytes":522760,"outputBytes":443927}
-2021-11-08 16:36:29 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2562703,"outputBytes":2497652}
-2021-11-08 16:36:29 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":56,"inputBytes":3019500,"outputBytes":1612928}
-2021-11-08 16:36:30 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":56,"inputBytes":3019500,"outputBytes":2947346}
-2021-11-08 16:36:52 STATE: Typings: {"input":"src/human.ts","output":"types","files":49}
-2021-11-08 16:36:59 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":48,"generated":true}
-2021-11-08 16:36:59 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5801,"outputBytes":3822}
-2021-11-08 16:37:37 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":90,"errors":0,"warnings":0}
-2021-11-08 16:37:38 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
-2021-11-08 16:37:38 INFO:  Done...
+2021-11-09 10:37:39 INFO:  @vladmandic/human version 2.5.1
+2021-11-09 10:37:39 INFO:  User: vlado Platform: linux Arch: x64 Node: v17.0.1
+2021-11-09 10:37:39 INFO:  Application: {"name":"@vladmandic/human","version":"2.5.1"}
+2021-11-09 10:37:39 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
+2021-11-09 10:37:39 INFO:  Toolchain: {"build":"0.6.3","esbuild":"0.13.12","typescript":"4.4.4","typedoc":"0.22.8","eslint":"8.2.0"}
+2021-11-09 10:37:39 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
+2021-11-09 10:37:39 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
+2021-11-09 10:37:39 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
+2021-11-09 10:37:39 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":56,"inputBytes":523132,"outputBytes":441945}
+2021-11-09 10:37:39 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
+2021-11-09 10:37:39 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":56,"inputBytes":523140,"outputBytes":441949}
+2021-11-09 10:37:39 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
+2021-11-09 10:37:39 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":56,"inputBytes":523207,"outputBytes":442021}
+2021-11-09 10:37:39 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
+2021-11-09 10:37:39 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2326,"outputBytes":912}
+2021-11-09 10:37:39 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":56,"inputBytes":522769,"outputBytes":443935}
+2021-11-09 10:37:40 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2562703,"outputBytes":2497652}
+2021-11-09 10:37:40 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":56,"inputBytes":3019509,"outputBytes":1612938}
+2021-11-09 10:37:41 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":56,"inputBytes":3019509,"outputBytes":2947355}
+2021-11-09 10:38:03 STATE: Typings: {"input":"src/human.ts","output":"types","files":49}
+2021-11-09 10:38:10 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":48,"generated":true}
+2021-11-09 10:38:10 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5801,"outputBytes":3822}
+2021-11-09 10:38:10 STATE: Compile: {"name":"demo/facerecognition","format":"esm","platform":"browser","input":"demo/facerecognition/index.ts","output":"demo/facerecognition/index.js","files":1,"inputBytes":8148,"outputBytes":5851}
+2021-11-09 10:38:48 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":90,"errors":0,"warnings":0}
+2021-11-09 10:38:49 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
+2021-11-09 10:38:49 INFO:  Done...