improve box rescaling for all modules

pull/233/head
Vladimir Mandic 2021-11-03 16:32:07 -04:00
parent 8638500542
commit 8e15c91e33
22 changed files with 1306 additions and 1608 deletions

View File

@ -9,11 +9,9 @@
## Changelog
### **HEAD -> main** 2021/10/31 mandic00@live.com
### **origin/main** 2021/10/31 mandic00@live.com
### **HEAD -> main** 2021/11/02 mandic00@live.com
- refactor predict with execute
- patch tfjs type defs
- start 2.5 major version
- build and docs cleanup

View File

@ -9,12 +9,7 @@ import Human from "../../dist/human.esm.js";
var config = {
modelBasePath: "../../models",
backend: "humangl",
async: true,
face: { enabled: true },
body: { enabled: true },
hand: { enabled: true },
object: { enabled: false },
gesture: { enabled: true }
async: true
};
var human = new Human(config);
human.env.perfadd = false;
@ -39,7 +34,7 @@ var perf = (msg) => {
};
async function webCam() {
status("starting webcam...");
const options = { audio: false, video: { facingMode: "user", resizeMode: "none", width: { ideal: document.body.clientWidth } } };
const options = { audio: false, video: { facingMode: "user", resizeMode: "crop-and-scale", width: { ideal: document.body.clientWidth } } };
const stream = await navigator.mediaDevices.getUserMedia(options);
const ready = new Promise((resolve) => {
dom.video.onloadeddata = () => resolve(true);

View File

@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["index.ts"],
"sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary <https://github.com/vladmandic/human>\n * @author <https://github.com/vladmandic>\n * @copyright <https://github.com/vladmandic>\n * @license MIT\n */\n\n/// <reference path=\"../../types/src//human.d.ts\" />\n\nimport Human from '../../dist/human.esm.js'; // equivalent of @vladmandic/human\n\nconst config = {\n modelBasePath: '../../models',\n backend: 'humangl',\n async: true,\n face: { enabled: true },\n body: { enabled: true },\n hand: { enabled: true },\n object: { enabled: false },\n gesture: { enabled: true },\n};\n\nconst human = new Human(config);\nhuman.env.perfadd = false;\nlet result;\n\nconst dom = {\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('status') as HTMLPreElement,\n perf: document.getElementById('performance') as HTMLDivElement,\n};\n\nconst fps = { detect: 0, draw: 0 };\n\nconst log = (...msg) => {\n dom.log.innerText += msg.join(' ') + '\\n';\n // eslint-disable-next-line no-console\n console.log(...msg);\n};\nconst status = (msg) => {\n dom.fps.innerText = msg;\n};\nconst perf = (msg) => {\n dom.perf.innerText = 'performance: ' + JSON.stringify(msg).replace(/\"|{|}/g, '').replace(/,/g, ' | ');\n};\n\nasync function webCam() {\n status('starting webcam...');\n const options = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } };\n const stream: MediaStream = await navigator.mediaDevices.getUserMedia(options);\n const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });\n dom.video.srcObject = stream;\n dom.video.play();\n await ready;\n dom.canvas.width = dom.video.videoWidth;\n dom.canvas.height = dom.video.videoHeight;\n const track: MediaStreamTrack = stream.getVideoTracks()[0];\n const 
capabilities: MediaTrackCapabilities | string = track.getCapabilities ? track.getCapabilities() : '';\n const settings: MediaTrackSettings | string = track.getSettings ? track.getSettings() : '';\n const constraints: MediaTrackConstraints | string = track.getConstraints ? track.getConstraints() : '';\n log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });\n dom.canvas.onclick = () => {\n if (dom.video.paused) dom.video.play();\n else dom.video.pause();\n };\n}\n\nasync function detectionLoop() {\n const t0 = human.now();\n if (!dom.video.paused) {\n result = await human.detect(dom.video);\n }\n const t1 = human.now();\n fps.detect = 1000 / (t1 - t0);\n requestAnimationFrame(detectionLoop);\n}\n\nasync function drawLoop() {\n const t0 = human.now();\n if (!dom.video.paused) {\n const interpolated = await human.next(result);\n await human.draw.canvas(dom.video, dom.canvas);\n await human.draw.all(dom.canvas, interpolated);\n perf(interpolated.performance);\n }\n const t1 = human.now();\n fps.draw = 1000 / (t1 - t0);\n status(dom.video.paused ? 'paused' : `fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect / ${fps.draw.toFixed(1).padStart(5, ' ')} draw`);\n requestAnimationFrame(drawLoop);\n}\n\nasync function main() {\n log('human version:', human.version, 'tfjs:', human.tf.version_core);\n log('platform:', human.env.platform, 'agent:', human.env.agent);\n status('loading...');\n await human.load();\n status('initializing...');\n log('backend:', human.tf.getBackend(), 'available:', human.env.backends);\n await human.warmup();\n await webCam();\n await detectionLoop();\n await drawLoop();\n}\n\nwindow.onload = main;\n"],
"mappings": ";;;;;;;AAWA;AAXA,AAaA,IAAM,SAAS;AAAA,EACb,eAAe;AAAA,EACf,SAAS;AAAA,EACT,OAAO;AAAA,EACP,MAAM,EAAE,SAAS;AAAA,EACjB,MAAM,EAAE,SAAS;AAAA,EACjB,MAAM,EAAE,SAAS;AAAA,EACjB,QAAQ,EAAE,SAAS;AAAA,EACnB,SAAS,EAAE,SAAS;AAAA;AAGtB,IAAM,QAAQ,IAAI,MAAM;AACxB,MAAM,IAAI,UAAU;AACpB,IAAI;AAEJ,IAAM,MAAM;AAAA,EACV,OAAO,SAAS,eAAe;AAAA,EAC/B,QAAQ,SAAS,eAAe;AAAA,EAChC,KAAK,SAAS,eAAe;AAAA,EAC7B,KAAK,SAAS,eAAe;AAAA,EAC7B,MAAM,SAAS,eAAe;AAAA;AAGhC,IAAM,MAAM,EAAE,QAAQ,GAAG,MAAM;AAE/B,IAAM,MAAM,IAAI,QAAQ;AACtB,MAAI,IAAI,aAAa,IAAI,KAAK,OAAO;AAErC,UAAQ,IAAI,GAAG;AAAA;AAEjB,IAAM,SAAS,CAAC,QAAQ;AACtB,MAAI,IAAI,YAAY;AAAA;AAEtB,IAAM,OAAO,CAAC,QAAQ;AACpB,MAAI,KAAK,YAAY,kBAAkB,KAAK,UAAU,KAAK,QAAQ,UAAU,IAAI,QAAQ,MAAM;AAAA;AAGjG,wBAAwB;AACtB,SAAO;AACP,QAAM,UAAU,EAAE,OAAO,OAAO,OAAO,EAAE,YAAY,QAAQ,YAAY,QAAQ,OAAO,EAAE,OAAO,SAAS,KAAK;AAC/G,QAAM,SAAsB,MAAM,UAAU,aAAa,aAAa;AACtE,QAAM,QAAQ,IAAI,QAAQ,CAAC,YAAY;AAAE,QAAI,MAAM,eAAe,MAAM,QAAQ;AAAA;AAChF,MAAI,MAAM,YAAY;AACtB,MAAI,MAAM;AACV,QAAM;AACN,MAAI,OAAO,QAAQ,IAAI,MAAM;AAC7B,MAAI,OAAO,SAAS,IAAI,MAAM;AAC9B,QAAM,QAA0B,OAAO,iBAAiB;AACxD,QAAM,eAAgD,MAAM,kBAAkB,MAAM,oBAAoB;AACxG,QAAM,WAAwC,MAAM,cAAc,MAAM,gBAAgB;AACxF,QAAM,cAA8C,MAAM,iBAAiB,MAAM,mBAAmB;AACpG,MAAI,UAAU,IAAI,MAAM,YAAY,IAAI,MAAM,aAAa,MAAM,OAAO,EAAE,QAAQ,OAAO,UAAU,aAAa;AAChH,MAAI,OAAO,UAAU,MAAM;AACzB,QAAI,IAAI,MAAM;AAAQ,UAAI,MAAM;AAAA;AAC3B,UAAI,MAAM;AAAA;AAAA;AAInB,+BAA+B;AAC7B,QAAM,KAAK,MAAM;AACjB,MAAI,CAAC,IAAI,MAAM,QAAQ;AACrB,aAAS,MAAM,MAAM,OAAO,IAAI;AAAA;AAElC,QAAM,KAAK,MAAM;AACjB,MAAI,SAAS,MAAQ,MAAK;AAC1B,wBAAsB;AAAA;AAGxB,0BAA0B;AACxB,QAAM,KAAK,MAAM;AACjB,MAAI,CAAC,IAAI,MAAM,QAAQ;AACrB,UAAM,eAAe,MAAM,MAAM,KAAK;AACtC,UAAM,MAAM,KAAK,OAAO,IAAI,OAAO,IAAI;AACvC,UAAM,MAAM,KAAK,IAAI,IAAI,QAAQ;AACjC,SAAK,aAAa;AAAA;AAEpB,QAAM,KAAK,MAAM;AACjB,MAAI,OAAO,MAAQ,MAAK;AACxB,SAAO,IAAI,MAAM,SAAS,WAAW,QAAQ,IAAI,OAAO,QAAQ,GAAG,SAAS,GAAG,iBAAiB,IAAI,KAAK,QAAQ,GAAG,SAAS,GAAG;AAChI,wBAAsB;AAAA;AAGxB,sBAAsB;AACpB,MAAI,kBAAkB,MAAM,SAAS,SAAS,MAAM,GAAG;AACvD,MAAI,aAAa,MAAM,IAAI,UAAU,UAAU,MAAM,IAAI;AACzD,SAAO;AACP
,QAAM,MAAM;AACZ,SAAO;AACP,MAAI,YAAY,MAAM,GAAG,cAAc,cAAc,MAAM,IAAI;AAC/D,QAAM,MAAM;AACZ,QAAM;AACN,QAAM;AACN,QAAM;AAAA;AAGR,OAAO,SAAS;",
"sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary <https://github.com/vladmandic/human>\n * @author <https://github.com/vladmandic>\n * @copyright <https://github.com/vladmandic>\n * @license MIT\n */\n\n/// <reference path=\"../../types/src//human.d.ts\" />\n\nimport Human from '../../dist/human.esm.js'; // equivalent of @vladmandic/human\n\nconst config = {\n modelBasePath: '../../models',\n backend: 'humangl',\n async: true,\n // face: { enabled: true, detector: { rotation: true }, iris: { enabled: false }, description: { enabled: false }, emotion: { enabled: false } },\n // body: { enabled: false },\n // hand: { enabled: false },\n // object: { enabled: false },\n // gesture: { enabled: true },\n};\n\nconst human = new Human(config);\nhuman.env.perfadd = false;\nlet result;\n\nconst dom = {\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('status') as HTMLPreElement,\n perf: document.getElementById('performance') as HTMLDivElement,\n};\n\nconst fps = { detect: 0, draw: 0 };\n\nconst log = (...msg) => {\n dom.log.innerText += msg.join(' ') + '\\n';\n // eslint-disable-next-line no-console\n console.log(...msg);\n};\nconst status = (msg) => {\n dom.fps.innerText = msg;\n};\nconst perf = (msg) => {\n dom.perf.innerText = 'performance: ' + JSON.stringify(msg).replace(/\"|{|}/g, '').replace(/,/g, ' | ');\n};\n\nasync function webCam() {\n status('starting webcam...');\n const options = { audio: false, video: { facingMode: 'user', resizeMode: 'crop-and-scale', width: { ideal: document.body.clientWidth } } };\n const stream: MediaStream = await navigator.mediaDevices.getUserMedia(options);\n const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });\n dom.video.srcObject = stream;\n dom.video.play();\n await ready;\n 
dom.canvas.width = dom.video.videoWidth;\n dom.canvas.height = dom.video.videoHeight;\n const track: MediaStreamTrack = stream.getVideoTracks()[0];\n const capabilities: MediaTrackCapabilities | string = track.getCapabilities ? track.getCapabilities() : '';\n const settings: MediaTrackSettings | string = track.getSettings ? track.getSettings() : '';\n const constraints: MediaTrackConstraints | string = track.getConstraints ? track.getConstraints() : '';\n log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });\n dom.canvas.onclick = () => {\n if (dom.video.paused) dom.video.play();\n else dom.video.pause();\n };\n}\n\nasync function detectionLoop() {\n const t0 = human.now();\n if (!dom.video.paused) {\n result = await human.detect(dom.video);\n }\n const t1 = human.now();\n fps.detect = 1000 / (t1 - t0);\n requestAnimationFrame(detectionLoop);\n}\n\nasync function drawLoop() {\n const t0 = human.now();\n if (!dom.video.paused) {\n const interpolated = await human.next(result);\n await human.draw.canvas(dom.video, dom.canvas);\n await human.draw.all(dom.canvas, interpolated);\n perf(interpolated.performance);\n }\n const t1 = human.now();\n fps.draw = 1000 / (t1 - t0);\n status(dom.video.paused ? 'paused' : `fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect / ${fps.draw.toFixed(1).padStart(5, ' ')} draw`);\n requestAnimationFrame(drawLoop);\n}\n\nasync function main() {\n log('human version:', human.version, 'tfjs:', human.tf.version_core);\n log('platform:', human.env.platform, 'agent:', human.env.agent);\n status('loading...');\n await human.load();\n status('initializing...');\n log('backend:', human.tf.getBackend(), 'available:', human.env.backends);\n await human.warmup();\n await webCam();\n await detectionLoop();\n await drawLoop();\n}\n\nwindow.onload = main;\n"],
"mappings": ";;;;;;;AAWA;AAXA,AAaA,IAAM,SAAS;AAAA,EACb,eAAe;AAAA,EACf,SAAS;AAAA,EACT,OAAO;AAAA;AAQT,IAAM,QAAQ,IAAI,MAAM;AACxB,MAAM,IAAI,UAAU;AACpB,IAAI;AAEJ,IAAM,MAAM;AAAA,EACV,OAAO,SAAS,eAAe;AAAA,EAC/B,QAAQ,SAAS,eAAe;AAAA,EAChC,KAAK,SAAS,eAAe;AAAA,EAC7B,KAAK,SAAS,eAAe;AAAA,EAC7B,MAAM,SAAS,eAAe;AAAA;AAGhC,IAAM,MAAM,EAAE,QAAQ,GAAG,MAAM;AAE/B,IAAM,MAAM,IAAI,QAAQ;AACtB,MAAI,IAAI,aAAa,IAAI,KAAK,OAAO;AAErC,UAAQ,IAAI,GAAG;AAAA;AAEjB,IAAM,SAAS,CAAC,QAAQ;AACtB,MAAI,IAAI,YAAY;AAAA;AAEtB,IAAM,OAAO,CAAC,QAAQ;AACpB,MAAI,KAAK,YAAY,kBAAkB,KAAK,UAAU,KAAK,QAAQ,UAAU,IAAI,QAAQ,MAAM;AAAA;AAGjG,wBAAwB;AACtB,SAAO;AACP,QAAM,UAAU,EAAE,OAAO,OAAO,OAAO,EAAE,YAAY,QAAQ,YAAY,kBAAkB,OAAO,EAAE,OAAO,SAAS,KAAK;AACzH,QAAM,SAAsB,MAAM,UAAU,aAAa,aAAa;AACtE,QAAM,QAAQ,IAAI,QAAQ,CAAC,YAAY;AAAE,QAAI,MAAM,eAAe,MAAM,QAAQ;AAAA;AAChF,MAAI,MAAM,YAAY;AACtB,MAAI,MAAM;AACV,QAAM;AACN,MAAI,OAAO,QAAQ,IAAI,MAAM;AAC7B,MAAI,OAAO,SAAS,IAAI,MAAM;AAC9B,QAAM,QAA0B,OAAO,iBAAiB;AACxD,QAAM,eAAgD,MAAM,kBAAkB,MAAM,oBAAoB;AACxG,QAAM,WAAwC,MAAM,cAAc,MAAM,gBAAgB;AACxF,QAAM,cAA8C,MAAM,iBAAiB,MAAM,mBAAmB;AACpG,MAAI,UAAU,IAAI,MAAM,YAAY,IAAI,MAAM,aAAa,MAAM,OAAO,EAAE,QAAQ,OAAO,UAAU,aAAa;AAChH,MAAI,OAAO,UAAU,MAAM;AACzB,QAAI,IAAI,MAAM;AAAQ,UAAI,MAAM;AAAA;AAC3B,UAAI,MAAM;AAAA;AAAA;AAInB,+BAA+B;AAC7B,QAAM,KAAK,MAAM;AACjB,MAAI,CAAC,IAAI,MAAM,QAAQ;AACrB,aAAS,MAAM,MAAM,OAAO,IAAI;AAAA;AAElC,QAAM,KAAK,MAAM;AACjB,MAAI,SAAS,MAAQ,MAAK;AAC1B,wBAAsB;AAAA;AAGxB,0BAA0B;AACxB,QAAM,KAAK,MAAM;AACjB,MAAI,CAAC,IAAI,MAAM,QAAQ;AACrB,UAAM,eAAe,MAAM,MAAM,KAAK;AACtC,UAAM,MAAM,KAAK,OAAO,IAAI,OAAO,IAAI;AACvC,UAAM,MAAM,KAAK,IAAI,IAAI,QAAQ;AACjC,SAAK,aAAa;AAAA;AAEpB,QAAM,KAAK,MAAM;AACjB,MAAI,OAAO,MAAQ,MAAK;AACxB,SAAO,IAAI,MAAM,SAAS,WAAW,QAAQ,IAAI,OAAO,QAAQ,GAAG,SAAS,GAAG,iBAAiB,IAAI,KAAK,QAAQ,GAAG,SAAS,GAAG;AAChI,wBAAsB;AAAA;AAGxB,sBAAsB;AACpB,MAAI,kBAAkB,MAAM,SAAS,SAAS,MAAM,GAAG;AACvD,MAAI,aAAa,MAAM,IAAI,UAAU,UAAU,MAAM,IAAI;AACzD,SAAO;AACP,QAAM,MAAM;AACZ,SAAO;AACP,MAAI,YAAY,MAAM,GAAG,cAAc,cAAc,MAAM,IAAI;AAC/D,QAAM,MAAM;AACZ,QAAM;AACN,QAAM;AACN,QAAM;AAAA;AAGR,OAAO,S
AAS;",
"names": []
}

View File

@ -15,11 +15,11 @@ const config = {
modelBasePath: '../../models',
backend: 'humangl',
async: true,
face: { enabled: true },
body: { enabled: true },
hand: { enabled: true },
object: { enabled: false },
gesture: { enabled: true },
// face: { enabled: true, detector: { rotation: true }, iris: { enabled: false }, description: { enabled: false }, emotion: { enabled: false } },
// body: { enabled: false },
// hand: { enabled: false },
// object: { enabled: false },
// gesture: { enabled: true },
};
const human = new Human(config);
@ -50,7 +50,7 @@ const perf = (msg) => {
async function webCam() {
status('starting webcam...');
const options = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } };
const options = { audio: false, video: { facingMode: 'user', resizeMode: 'crop-and-scale', width: { ideal: document.body.clientWidth } } };
const stream: MediaStream = await navigator.mediaDevices.getUserMedia(options);
const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });
dom.video.srcObject = stream;

View File

@ -4640,31 +4640,34 @@ var getRawBox = (box4, input) => box4 ? [
var scaleBoxCoordinates = (box4, factor) => {
const startPoint = [box4.startPoint[0] * factor[0], box4.startPoint[1] * factor[1]];
const endPoint = [box4.endPoint[0] * factor[0], box4.endPoint[1] * factor[1]];
return { startPoint, endPoint };
return { startPoint, endPoint, landmarks: box4.landmarks, confidence: box4.confidence };
};
var cutBoxFromImageAndResize = (box4, image25, cropSize) => {
const h = image25.shape[1];
const w = image25.shape[2];
return tfjs_esm_exports.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const crop2 = tfjs_esm_exports.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const norm = tfjs_esm_exports.div(crop2, 255);
tfjs_esm_exports.dispose(crop2);
return norm;
};
var enlargeBox = (box4, factor = 1.5) => {
var enlargeBox = (box4, factor) => {
const center = getBoxCenter(box4);
const size2 = getBoxSize(box4);
const halfSize = [factor * size2[0] / 2, factor * size2[1] / 2];
return { startPoint: [center[0] - halfSize[0], center[1] - halfSize[1]], endPoint: [center[0] + halfSize[0], center[1] + halfSize[1]], landmarks: box4.landmarks };
return { startPoint: [center[0] - halfSize[0], center[1] - halfSize[1]], endPoint: [center[0] + halfSize[0], center[1] + halfSize[1]], landmarks: box4.landmarks, confidence: box4.confidence };
};
var squarifyBox = (box4) => {
const centers = getBoxCenter(box4);
const size2 = getBoxSize(box4);
const halfSize = Math.max(...size2) / 2;
return { startPoint: [Math.round(centers[0] - halfSize), Math.round(centers[1] - halfSize)], endPoint: [Math.round(centers[0] + halfSize), Math.round(centers[1] + halfSize)], landmarks: box4.landmarks };
return { startPoint: [Math.round(centers[0] - halfSize), Math.round(centers[1] - halfSize)], endPoint: [Math.round(centers[0] + halfSize), Math.round(centers[1] + halfSize)], landmarks: box4.landmarks, confidence: box4.confidence };
};
var calculateLandmarksBoundingBox = (landmarks) => {
const xs = landmarks.map((d) => d[0]);
const ys = landmarks.map((d) => d[1]);
return { startPoint: [Math.min(...xs), Math.min(...ys)], endPoint: [Math.max(...xs), Math.max(...ys)], landmarks };
};
var IDENTITY_MATRIX = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];
var fixedRotationMatrix = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];
var normalizeRadians = (angle) => angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));
var computeRotation = (point1, point2) => normalizeRadians(Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]));
var buildTranslationMatrix = (x, y) => [[1, 0, x], [0, 1, y], [0, 0, 1]];
@ -4725,16 +4728,17 @@ function generateAnchors(inputSize8) {
}
return anchors4;
}
function transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize8) {
const boxSize = getBoxSize({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const coordsScaled = rawCoords.map((coord) => [
function transformRawCoords(coordsRaw, box4, angle, rotationMatrix, inputSize8) {
const boxSize = getBoxSize(box4);
const coordsScaled = coordsRaw.map((coord) => [
boxSize[0] / inputSize8 * (coord[0] - inputSize8 / 2),
boxSize[1] / inputSize8 * (coord[1] - inputSize8 / 2),
coord[2] || 0
]);
const coordsRotationMatrix = angle !== 0 ? buildRotationMatrix(angle, [0, 0]) : IDENTITY_MATRIX;
const coordsRotated = angle !== 0 ? coordsScaled.map((coord) => [...rotatePoint(coord, coordsRotationMatrix), coord[2]]) : coordsScaled;
const inverseRotationMatrix = angle !== 0 ? invertTransformMatrix(rotationMatrix) : IDENTITY_MATRIX;
const largeAngle = angle && angle !== 0 && Math.abs(angle) > 0.2;
const coordsRotationMatrix = largeAngle ? buildRotationMatrix(angle, [0, 0]) : fixedRotationMatrix;
const coordsRotated = largeAngle ? coordsScaled.map((coord) => [...rotatePoint(coord, coordsRotationMatrix), coord[2]]) : coordsScaled;
const inverseRotationMatrix = largeAngle ? invertTransformMatrix(rotationMatrix) : fixedRotationMatrix;
const boxCenter = [...getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint }), 1];
return coordsRotated.map((coord) => [
Math.round(coord[0] + dot(boxCenter, inverseRotationMatrix[0])),
@ -4745,14 +4749,20 @@ function transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize8)
function correctFaceRotation(box4, input, inputSize8) {
const symmetryLine = box4.landmarks.length >= meshLandmarks.count ? meshLandmarks.symmetryLine : blazeFaceLandmarks.symmetryLine;
const angle = computeRotation(box4.landmarks[symmetryLine[0]], box4.landmarks[symmetryLine[1]]);
const faceCenter = getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
const rotated = tfjs_esm_exports.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
const rotationMatrix = buildRotationMatrix(-angle, faceCenter);
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, rotated, [inputSize8, inputSize8]);
const face5 = tfjs_esm_exports.div(cut, 255);
tfjs_esm_exports.dispose(cut);
tfjs_esm_exports.dispose(rotated);
const largeAngle = angle && angle !== 0 && Math.abs(angle) > 0.2;
let rotationMatrix;
let face5;
if (largeAngle) {
const faceCenter = getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
const rotated = tfjs_esm_exports.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
rotationMatrix = buildRotationMatrix(-angle, faceCenter);
face5 = cutBoxFromImageAndResize(box4, rotated, [inputSize8, inputSize8]);
tfjs_esm_exports.dispose(rotated);
} else {
rotationMatrix = fixedRotationMatrix;
face5 = cutBoxFromImageAndResize(box4, input, [inputSize8, inputSize8]);
}
return [angle, rotationMatrix, face5];
}
@ -5386,7 +5396,8 @@ async function predict5(image25, config3, idx, count2) {
return new Promise(async (resolve) => {
const obj = [];
if (config3.face.emotion?.enabled) {
const resize = tfjs_esm_exports.image.resizeBilinear(image25, [model6?.inputs[0].shape ? model6.inputs[0].shape[2] : 0, model6?.inputs[0].shape ? model6.inputs[0].shape[1] : 0], false);
const inputSize8 = model6?.inputs[0].shape ? model6.inputs[0].shape[2] : 0;
const resize = tfjs_esm_exports.image.resizeBilinear(image25, [inputSize8, inputSize8], false);
const [red, green, blue] = tfjs_esm_exports.split(resize, 3, 3);
tfjs_esm_exports.dispose(resize);
const redNorm = tfjs_esm_exports.mul(red, rgb[0]);
@ -5555,35 +5566,33 @@ var model8 = null;
var inputSize5 = 0;
var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0;
var detectedFaces = 0;
var enlargeFact = 1.6;
async function predict6(input, config3) {
const skipTime = (config3.face.detector?.skipTime || 0) > now() - lastTime6;
const skipFrame = skipped7 < (config3.face.detector?.skipFrames || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || detectedFaces === 0) {
const newBoxes2 = await getBoxes(input, config3);
if (!config3.skipAllowed || !skipTime || !skipFrame || boxCache.length === 0) {
const possibleBoxes = await getBoxes(input, config3);
lastTime6 = now();
boxCache = [];
for (const possible of newBoxes2.boxes) {
const startPoint = await possible.box.startPoint.data();
const endPoint = await possible.box.endPoint.data();
const landmarks = await possible.landmarks.array();
boxCache.push({ startPoint, endPoint, landmarks, confidence: possible.confidence });
}
newBoxes2.boxes.forEach((prediction) => tfjs_esm_exports.dispose([prediction.box.startPoint, prediction.box.endPoint, prediction.landmarks]));
for (let i = 0; i < boxCache.length; i++) {
const scaledBox = scaleBoxCoordinates({ startPoint: boxCache[i].startPoint, endPoint: boxCache[i].endPoint }, newBoxes2.scaleFactor);
const enlargedBox = enlargeBox(scaledBox);
const squarifiedBox = squarifyBox(enlargedBox);
boxCache[i] = { ...squarifiedBox, confidence: boxCache[i].confidence, landmarks: boxCache[i].landmarks };
for (const possible of possibleBoxes.boxes) {
const box4 = {
startPoint: await possible.box.startPoint.data(),
endPoint: await possible.box.endPoint.data(),
landmarks: await possible.landmarks.array(),
confidence: possible.confidence
};
boxCache.push(squarifyBox(enlargeBox(scaleBoxCoordinates(box4, possibleBoxes.scaleFactor), Math.sqrt(enlargeFact))));
}
possibleBoxes.boxes.forEach((prediction) => tfjs_esm_exports.dispose([prediction.box.startPoint, prediction.box.endPoint, prediction.landmarks]));
skipped7 = 0;
} else {
skipped7++;
}
const faces = [];
const newBoxes = [];
const newCache = [];
let id = 0;
for (let box4 of boxCache) {
for (let i = 0; i < boxCache.length; i++) {
let box4 = boxCache[i];
let angle = 0;
let rotationMatrix;
const face5 = {
@ -5600,16 +5609,15 @@ async function predict6(input, config3) {
if (config3.face.detector?.rotation && config3.face.mesh?.enabled && env.kernels.includes("rotatewithoffset")) {
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} else {
rotationMatrix = IDENTITY_MATRIX;
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input, config3.face.mesh?.enabled ? [inputSize5, inputSize5] : [size(), size()]);
face5.tensor = tfjs_esm_exports.div(cut, 255);
tfjs_esm_exports.dispose(cut);
rotationMatrix = fixedRotationMatrix;
face5.tensor = cutBoxFromImageAndResize(box4, input, config3.face.mesh?.enabled ? [inputSize5, inputSize5] : [size(), size()]);
}
face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!config3.face.mesh?.enabled) {
face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input);
face5.score = Math.round(100 * box4.confidence || 0) / 100;
face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
face5.score = face5.boxScore;
face5.mesh = box4.landmarks.map((pt) => [
(box4.startPoint[0] + box4.endPoint[0]) / 2 + (box4.endPoint[0] + box4.startPoint[0]) * pt[0] / size(),
(box4.startPoint[1] + box4.endPoint[1]) / 2 + (box4.endPoint[1] + box4.startPoint[1]) * pt[1] / size()
@ -5622,40 +5630,30 @@ async function predict6(input, config3) {
log("face mesh detection requested, but model is not loaded");
} else {
const [contours, confidence, contourCoords] = model8.execute(face5.tensor);
tfjs_esm_exports.dispose(contours);
const faceConfidence = (await confidence.data())[0];
tfjs_esm_exports.dispose(confidence);
const faceConfidence = await confidence.data();
face5.faceScore = Math.round(100 * faceConfidence[0]) / 100;
const coordsReshaped = tfjs_esm_exports.reshape(contourCoords, [-1, 3]);
let rawCoords = await coordsReshaped.array();
tfjs_esm_exports.dispose(contourCoords);
tfjs_esm_exports.dispose(coordsReshaped);
if (faceConfidence < (config3.face.detector?.minConfidence || 1)) {
box4.confidence = faceConfidence;
tfjs_esm_exports.dispose([contourCoords, coordsReshaped, confidence, contours]);
if (face5.faceScore < (config3.face.detector?.minConfidence || 1)) {
box4.confidence = face5.faceScore;
} else {
if (config3.face.iris?.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence };
for (const key of Object.keys(meshAnnotations))
face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]);
if (config3.face.detector?.rotation && config3.face.mesh.enabled && config3.face.description?.enabled && env.kernels.includes("rotatewithoffset")) {
tfjs_esm_exports.dispose(face5.tensor);
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
}
box4 = squarifyBox(enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact));
face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input);
face5.score = Math.round(100 * faceConfidence || 100 * box4.confidence || 0) / 100;
face5.faceScore = Math.round(100 * faceConfidence) / 100;
box4 = { ...squarifyBox(box4), confidence: box4.confidence, faceConfidence };
face5.score = face5.faceScore;
newCache.push(box4);
}
}
faces.push(face5);
newBoxes.push(box4);
}
if (config3.face.mesh?.enabled)
boxCache = newBoxes.filter((a) => a.confidence > (config3.face.detector?.minConfidence || 0));
detectedFaces = faces.length;
boxCache = [...newCache];
return faces;
}
async function load8(config3) {
@ -5702,10 +5700,9 @@ function enhance(input) {
const tensor3 = input.image || input.tensor || input;
if (!(tensor3 instanceof Tensor))
return null;
const box4 = [[0.05, 0.15, 0.85, 0.85]];
if (!model9?.inputs[0].shape)
return null;
const crop2 = tensor3.shape.length === 3 ? tfjs_esm_exports.image.cropAndResize(tfjs_esm_exports.expandDims(tensor3, 0), box4, [0], [model9.inputs[0].shape[2], model9.inputs[0].shape[1]]) : tfjs_esm_exports.image.cropAndResize(tensor3, box4, [0], [model9.inputs[0].shape[2], model9.inputs[0].shape[1]]);
const crop2 = tfjs_esm_exports.image.resizeBilinear(tensor3, [model9.inputs[0].shape[2], model9.inputs[0].shape[1]], false);
const norm = tfjs_esm_exports.mul(crop2, 255);
return norm;
});
@ -11612,14 +11609,17 @@ var iris3 = (res) => {
center = true;
gestures.push({ iris: i, gesture: "facing center" });
}
const rightIrisCenterX = Math.abs(res[i].mesh[33][0] - res[i].annotations.rightEyeIris[0][0]) / res[i].box[2];
const leftIrisCenterX = Math.abs(res[i].mesh[263][0] - res[i].annotations.leftEyeIris[0][0]) / res[i].box[2];
const rightIrisCenterX = Math.abs(res[i].mesh[33][0] - res[i].annotations.rightEyeIris[0][0]) / res[i].box[2];
if (leftIrisCenterX > 0.06 || rightIrisCenterX > 0.06)
center = false;
if (leftIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking right" });
if (rightIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking left" });
if (leftIrisCenterX > rightIrisCenterX) {
if (leftIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking right" });
} else {
if (rightIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking left" });
}
const rightIrisCenterY = Math.abs(res[i].mesh[145][1] - res[i].annotations.rightEyeIris[0][1]) / res[i].box[3];
const leftIrisCenterY = Math.abs(res[i].mesh[374][1] - res[i].annotations.leftEyeIris[0][1]) / res[i].box[3];
if (leftIrisCenterY < 0.01 || rightIrisCenterY < 0.01 || leftIrisCenterY > 0.022 || rightIrisCenterY > 0.022)

File diff suppressed because one or more lines are too long

138
dist/human.esm.js vendored
View File

@ -74971,31 +74971,34 @@ var getRawBox = (box4, input2) => box4 ? [
var scaleBoxCoordinates = (box4, factor) => {
const startPoint = [box4.startPoint[0] * factor[0], box4.startPoint[1] * factor[1]];
const endPoint = [box4.endPoint[0] * factor[0], box4.endPoint[1] * factor[1]];
return { startPoint, endPoint };
return { startPoint, endPoint, landmarks: box4.landmarks, confidence: box4.confidence };
};
var cutBoxFromImageAndResize = (box4, image7, cropSize) => {
const h = image7.shape[1];
const w = image7.shape[2];
return image.cropAndResize(image7, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const crop2 = image.cropAndResize(image7, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const norm2 = div(crop2, 255);
dispose(crop2);
return norm2;
};
var enlargeBox = (box4, factor = 1.5) => {
var enlargeBox = (box4, factor) => {
const center = getBoxCenter(box4);
const size2 = getBoxSize(box4);
const halfSize = [factor * size2[0] / 2, factor * size2[1] / 2];
return { startPoint: [center[0] - halfSize[0], center[1] - halfSize[1]], endPoint: [center[0] + halfSize[0], center[1] + halfSize[1]], landmarks: box4.landmarks };
return { startPoint: [center[0] - halfSize[0], center[1] - halfSize[1]], endPoint: [center[0] + halfSize[0], center[1] + halfSize[1]], landmarks: box4.landmarks, confidence: box4.confidence };
};
var squarifyBox = (box4) => {
const centers = getBoxCenter(box4);
const size2 = getBoxSize(box4);
const halfSize = Math.max(...size2) / 2;
return { startPoint: [Math.round(centers[0] - halfSize), Math.round(centers[1] - halfSize)], endPoint: [Math.round(centers[0] + halfSize), Math.round(centers[1] + halfSize)], landmarks: box4.landmarks };
return { startPoint: [Math.round(centers[0] - halfSize), Math.round(centers[1] - halfSize)], endPoint: [Math.round(centers[0] + halfSize), Math.round(centers[1] + halfSize)], landmarks: box4.landmarks, confidence: box4.confidence };
};
var calculateLandmarksBoundingBox = (landmarks) => {
const xs = landmarks.map((d) => d[0]);
const ys = landmarks.map((d) => d[1]);
return { startPoint: [Math.min(...xs), Math.min(...ys)], endPoint: [Math.max(...xs), Math.max(...ys)], landmarks };
};
var IDENTITY_MATRIX = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];
var fixedRotationMatrix = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];
var normalizeRadians = (angle) => angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));
var computeRotation = (point1, point2) => normalizeRadians(Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]));
var buildTranslationMatrix = (x, y) => [[1, 0, x], [0, 1, y], [0, 0, 1]];
@ -75056,16 +75059,17 @@ function generateAnchors(inputSize8) {
}
return anchors4;
}
function transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize8) {
const boxSize = getBoxSize({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const coordsScaled = rawCoords.map((coord) => [
function transformRawCoords(coordsRaw, box4, angle, rotationMatrix, inputSize8) {
const boxSize = getBoxSize(box4);
const coordsScaled = coordsRaw.map((coord) => [
boxSize[0] / inputSize8 * (coord[0] - inputSize8 / 2),
boxSize[1] / inputSize8 * (coord[1] - inputSize8 / 2),
coord[2] || 0
]);
const coordsRotationMatrix = angle !== 0 ? buildRotationMatrix(angle, [0, 0]) : IDENTITY_MATRIX;
const coordsRotated = angle !== 0 ? coordsScaled.map((coord) => [...rotatePoint(coord, coordsRotationMatrix), coord[2]]) : coordsScaled;
const inverseRotationMatrix = angle !== 0 ? invertTransformMatrix(rotationMatrix) : IDENTITY_MATRIX;
const largeAngle = angle && angle !== 0 && Math.abs(angle) > 0.2;
const coordsRotationMatrix = largeAngle ? buildRotationMatrix(angle, [0, 0]) : fixedRotationMatrix;
const coordsRotated = largeAngle ? coordsScaled.map((coord) => [...rotatePoint(coord, coordsRotationMatrix), coord[2]]) : coordsScaled;
const inverseRotationMatrix = largeAngle ? invertTransformMatrix(rotationMatrix) : fixedRotationMatrix;
const boxCenter = [...getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint }), 1];
return coordsRotated.map((coord) => [
Math.round(coord[0] + dot4(boxCenter, inverseRotationMatrix[0])),
@ -75076,14 +75080,20 @@ function transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize8)
function correctFaceRotation(box4, input2, inputSize8) {
const symmetryLine = box4.landmarks.length >= meshLandmarks.count ? meshLandmarks.symmetryLine : blazeFaceLandmarks.symmetryLine;
const angle = computeRotation(box4.landmarks[symmetryLine[0]], box4.landmarks[symmetryLine[1]]);
const faceCenter = getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const faceCenterNormalized = [faceCenter[0] / input2.shape[2], faceCenter[1] / input2.shape[1]];
const rotated = image.rotateWithOffset(input2, angle, 0, faceCenterNormalized);
const rotationMatrix = buildRotationMatrix(-angle, faceCenter);
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, rotated, [inputSize8, inputSize8]);
const face5 = div(cut, 255);
dispose(cut);
dispose(rotated);
const largeAngle = angle && angle !== 0 && Math.abs(angle) > 0.2;
let rotationMatrix;
let face5;
if (largeAngle) {
const faceCenter = getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const faceCenterNormalized = [faceCenter[0] / input2.shape[2], faceCenter[1] / input2.shape[1]];
const rotated = image.rotateWithOffset(input2, angle, 0, faceCenterNormalized);
rotationMatrix = buildRotationMatrix(-angle, faceCenter);
face5 = cutBoxFromImageAndResize(box4, rotated, [inputSize8, inputSize8]);
dispose(rotated);
} else {
rotationMatrix = fixedRotationMatrix;
face5 = cutBoxFromImageAndResize(box4, input2, [inputSize8, inputSize8]);
}
return [angle, rotationMatrix, face5];
}
@ -75717,7 +75727,8 @@ async function predict5(image7, config3, idx, count3) {
return new Promise(async (resolve) => {
const obj = [];
if (config3.face.emotion?.enabled) {
const resize = image.resizeBilinear(image7, [model7?.inputs[0].shape ? model7.inputs[0].shape[2] : 0, model7?.inputs[0].shape ? model7.inputs[0].shape[1] : 0], false);
const inputSize8 = model7?.inputs[0].shape ? model7.inputs[0].shape[2] : 0;
const resize = image.resizeBilinear(image7, [inputSize8, inputSize8], false);
const [red, green, blue] = split(resize, 3, 3);
dispose(resize);
const redNorm = mul(red, rgb[0]);
@ -75886,35 +75897,33 @@ var model9 = null;
var inputSize5 = 0;
var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0;
var detectedFaces = 0;
var enlargeFact = 1.6;
async function predict6(input2, config3) {
const skipTime = (config3.face.detector?.skipTime || 0) > now() - lastTime6;
const skipFrame = skipped7 < (config3.face.detector?.skipFrames || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || detectedFaces === 0) {
const newBoxes2 = await getBoxes(input2, config3);
if (!config3.skipAllowed || !skipTime || !skipFrame || boxCache.length === 0) {
const possibleBoxes = await getBoxes(input2, config3);
lastTime6 = now();
boxCache = [];
for (const possible of newBoxes2.boxes) {
const startPoint = await possible.box.startPoint.data();
const endPoint = await possible.box.endPoint.data();
const landmarks = await possible.landmarks.array();
boxCache.push({ startPoint, endPoint, landmarks, confidence: possible.confidence });
}
newBoxes2.boxes.forEach((prediction) => dispose([prediction.box.startPoint, prediction.box.endPoint, prediction.landmarks]));
for (let i = 0; i < boxCache.length; i++) {
const scaledBox = scaleBoxCoordinates({ startPoint: boxCache[i].startPoint, endPoint: boxCache[i].endPoint }, newBoxes2.scaleFactor);
const enlargedBox = enlargeBox(scaledBox);
const squarifiedBox = squarifyBox(enlargedBox);
boxCache[i] = { ...squarifiedBox, confidence: boxCache[i].confidence, landmarks: boxCache[i].landmarks };
for (const possible of possibleBoxes.boxes) {
const box4 = {
startPoint: await possible.box.startPoint.data(),
endPoint: await possible.box.endPoint.data(),
landmarks: await possible.landmarks.array(),
confidence: possible.confidence
};
boxCache.push(squarifyBox(enlargeBox(scaleBoxCoordinates(box4, possibleBoxes.scaleFactor), Math.sqrt(enlargeFact))));
}
possibleBoxes.boxes.forEach((prediction) => dispose([prediction.box.startPoint, prediction.box.endPoint, prediction.landmarks]));
skipped7 = 0;
} else {
skipped7++;
}
const faces = [];
const newBoxes = [];
const newCache = [];
let id = 0;
for (let box4 of boxCache) {
for (let i = 0; i < boxCache.length; i++) {
let box4 = boxCache[i];
let angle = 0;
let rotationMatrix;
const face5 = {
@ -75931,16 +75940,15 @@ async function predict6(input2, config3) {
if (config3.face.detector?.rotation && config3.face.mesh?.enabled && env2.kernels.includes("rotatewithoffset")) {
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input2, inputSize5);
} else {
rotationMatrix = IDENTITY_MATRIX;
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input2, config3.face.mesh?.enabled ? [inputSize5, inputSize5] : [size(), size()]);
face5.tensor = div(cut, 255);
dispose(cut);
rotationMatrix = fixedRotationMatrix;
face5.tensor = cutBoxFromImageAndResize(box4, input2, config3.face.mesh?.enabled ? [inputSize5, inputSize5] : [size(), size()]);
}
face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!config3.face.mesh?.enabled) {
face5.box = getClampedBox(box4, input2);
face5.boxRaw = getRawBox(box4, input2);
face5.score = Math.round(100 * box4.confidence || 0) / 100;
face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
face5.score = face5.boxScore;
face5.mesh = box4.landmarks.map((pt) => [
(box4.startPoint[0] + box4.endPoint[0]) / 2 + (box4.endPoint[0] + box4.startPoint[0]) * pt[0] / size(),
(box4.startPoint[1] + box4.endPoint[1]) / 2 + (box4.endPoint[1] + box4.startPoint[1]) * pt[1] / size()
@ -75953,40 +75961,30 @@ async function predict6(input2, config3) {
log("face mesh detection requested, but model is not loaded");
} else {
const [contours, confidence, contourCoords] = model9.execute(face5.tensor);
dispose(contours);
const faceConfidence = (await confidence.data())[0];
dispose(confidence);
const faceConfidence = await confidence.data();
face5.faceScore = Math.round(100 * faceConfidence[0]) / 100;
const coordsReshaped = reshape(contourCoords, [-1, 3]);
let rawCoords = await coordsReshaped.array();
dispose(contourCoords);
dispose(coordsReshaped);
if (faceConfidence < (config3.face.detector?.minConfidence || 1)) {
box4.confidence = faceConfidence;
dispose([contourCoords, coordsReshaped, confidence, contours]);
if (face5.faceScore < (config3.face.detector?.minConfidence || 1)) {
box4.confidence = face5.faceScore;
} else {
if (config3.face.iris?.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input2.shape[2] || 0), pt[1] / (input2.shape[1] || 0), (pt[2] || 0) / inputSize5]);
box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence };
for (const key of Object.keys(meshAnnotations))
face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]);
if (config3.face.detector?.rotation && config3.face.mesh.enabled && config3.face.description?.enabled && env2.kernels.includes("rotatewithoffset")) {
dispose(face5.tensor);
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input2, inputSize5);
}
box4 = squarifyBox(enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact));
face5.box = getClampedBox(box4, input2);
face5.boxRaw = getRawBox(box4, input2);
face5.score = Math.round(100 * faceConfidence || 100 * box4.confidence || 0) / 100;
face5.faceScore = Math.round(100 * faceConfidence) / 100;
box4 = { ...squarifyBox(box4), confidence: box4.confidence, faceConfidence };
face5.score = face5.faceScore;
newCache.push(box4);
}
}
faces.push(face5);
newBoxes.push(box4);
}
if (config3.face.mesh?.enabled)
boxCache = newBoxes.filter((a) => a.confidence > (config3.face.detector?.minConfidence || 0));
detectedFaces = faces.length;
boxCache = [...newCache];
return faces;
}
async function load8(config3) {
@ -76033,10 +76031,9 @@ function enhance(input2) {
const tensor2 = input2.image || input2.tensor || input2;
if (!(tensor2 instanceof Tensor))
return null;
const box4 = [[0.05, 0.15, 0.85, 0.85]];
if (!model10?.inputs[0].shape)
return null;
const crop2 = tensor2.shape.length === 3 ? image.cropAndResize(expandDims(tensor2, 0), box4, [0], [model10.inputs[0].shape[2], model10.inputs[0].shape[1]]) : image.cropAndResize(tensor2, box4, [0], [model10.inputs[0].shape[2], model10.inputs[0].shape[1]]);
const crop2 = image.resizeBilinear(tensor2, [model10.inputs[0].shape[2], model10.inputs[0].shape[1]], false);
const norm2 = mul(crop2, 255);
return norm2;
});
@ -81941,14 +81938,17 @@ var iris3 = (res) => {
center = true;
gestures.push({ iris: i, gesture: "facing center" });
}
const rightIrisCenterX = Math.abs(res[i].mesh[33][0] - res[i].annotations.rightEyeIris[0][0]) / res[i].box[2];
const leftIrisCenterX = Math.abs(res[i].mesh[263][0] - res[i].annotations.leftEyeIris[0][0]) / res[i].box[2];
const rightIrisCenterX = Math.abs(res[i].mesh[33][0] - res[i].annotations.rightEyeIris[0][0]) / res[i].box[2];
if (leftIrisCenterX > 0.06 || rightIrisCenterX > 0.06)
center = false;
if (leftIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking right" });
if (rightIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking left" });
if (leftIrisCenterX > rightIrisCenterX) {
if (leftIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking right" });
} else {
if (rightIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking left" });
}
const rightIrisCenterY = Math.abs(res[i].mesh[145][1] - res[i].annotations.rightEyeIris[0][1]) / res[i].box[3];
const leftIrisCenterY = Math.abs(res[i].mesh[374][1] - res[i].annotations.leftEyeIris[0][1]) / res[i].box[3];
if (leftIrisCenterY < 0.01 || rightIrisCenterY < 0.01 || leftIrisCenterY > 0.022 || rightIrisCenterY > 0.022)

File diff suppressed because one or more lines are too long

592
dist/human.js vendored

File diff suppressed because one or more lines are too long

138
dist/human.node-gpu.js vendored
View File

@ -4661,31 +4661,34 @@ var getRawBox = (box4, input) => box4 ? [
var scaleBoxCoordinates = (box4, factor) => {
const startPoint = [box4.startPoint[0] * factor[0], box4.startPoint[1] * factor[1]];
const endPoint = [box4.endPoint[0] * factor[0], box4.endPoint[1] * factor[1]];
return { startPoint, endPoint };
return { startPoint, endPoint, landmarks: box4.landmarks, confidence: box4.confidence };
};
var cutBoxFromImageAndResize = (box4, image25, cropSize) => {
const h = image25.shape[1];
const w = image25.shape[2];
return tf5.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const crop2 = tf5.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const norm = tf5.div(crop2, 255);
tf5.dispose(crop2);
return norm;
};
var enlargeBox = (box4, factor = 1.5) => {
var enlargeBox = (box4, factor) => {
const center = getBoxCenter(box4);
const size2 = getBoxSize(box4);
const halfSize = [factor * size2[0] / 2, factor * size2[1] / 2];
return { startPoint: [center[0] - halfSize[0], center[1] - halfSize[1]], endPoint: [center[0] + halfSize[0], center[1] + halfSize[1]], landmarks: box4.landmarks };
return { startPoint: [center[0] - halfSize[0], center[1] - halfSize[1]], endPoint: [center[0] + halfSize[0], center[1] + halfSize[1]], landmarks: box4.landmarks, confidence: box4.confidence };
};
var squarifyBox = (box4) => {
const centers = getBoxCenter(box4);
const size2 = getBoxSize(box4);
const halfSize = Math.max(...size2) / 2;
return { startPoint: [Math.round(centers[0] - halfSize), Math.round(centers[1] - halfSize)], endPoint: [Math.round(centers[0] + halfSize), Math.round(centers[1] + halfSize)], landmarks: box4.landmarks };
return { startPoint: [Math.round(centers[0] - halfSize), Math.round(centers[1] - halfSize)], endPoint: [Math.round(centers[0] + halfSize), Math.round(centers[1] + halfSize)], landmarks: box4.landmarks, confidence: box4.confidence };
};
var calculateLandmarksBoundingBox = (landmarks) => {
const xs = landmarks.map((d) => d[0]);
const ys = landmarks.map((d) => d[1]);
return { startPoint: [Math.min(...xs), Math.min(...ys)], endPoint: [Math.max(...xs), Math.max(...ys)], landmarks };
};
var IDENTITY_MATRIX = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];
var fixedRotationMatrix = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];
var normalizeRadians = (angle) => angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));
var computeRotation = (point1, point2) => normalizeRadians(Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]));
var buildTranslationMatrix = (x, y) => [[1, 0, x], [0, 1, y], [0, 0, 1]];
@ -4746,16 +4749,17 @@ function generateAnchors(inputSize8) {
}
return anchors4;
}
function transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize8) {
const boxSize = getBoxSize({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const coordsScaled = rawCoords.map((coord) => [
function transformRawCoords(coordsRaw, box4, angle, rotationMatrix, inputSize8) {
const boxSize = getBoxSize(box4);
const coordsScaled = coordsRaw.map((coord) => [
boxSize[0] / inputSize8 * (coord[0] - inputSize8 / 2),
boxSize[1] / inputSize8 * (coord[1] - inputSize8 / 2),
coord[2] || 0
]);
const coordsRotationMatrix = angle !== 0 ? buildRotationMatrix(angle, [0, 0]) : IDENTITY_MATRIX;
const coordsRotated = angle !== 0 ? coordsScaled.map((coord) => [...rotatePoint(coord, coordsRotationMatrix), coord[2]]) : coordsScaled;
const inverseRotationMatrix = angle !== 0 ? invertTransformMatrix(rotationMatrix) : IDENTITY_MATRIX;
const largeAngle = angle && angle !== 0 && Math.abs(angle) > 0.2;
const coordsRotationMatrix = largeAngle ? buildRotationMatrix(angle, [0, 0]) : fixedRotationMatrix;
const coordsRotated = largeAngle ? coordsScaled.map((coord) => [...rotatePoint(coord, coordsRotationMatrix), coord[2]]) : coordsScaled;
const inverseRotationMatrix = largeAngle ? invertTransformMatrix(rotationMatrix) : fixedRotationMatrix;
const boxCenter = [...getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint }), 1];
return coordsRotated.map((coord) => [
Math.round(coord[0] + dot(boxCenter, inverseRotationMatrix[0])),
@ -4766,14 +4770,20 @@ function transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize8)
function correctFaceRotation(box4, input, inputSize8) {
const symmetryLine = box4.landmarks.length >= meshLandmarks.count ? meshLandmarks.symmetryLine : blazeFaceLandmarks.symmetryLine;
const angle = computeRotation(box4.landmarks[symmetryLine[0]], box4.landmarks[symmetryLine[1]]);
const faceCenter = getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
const rotated = tf5.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
const rotationMatrix = buildRotationMatrix(-angle, faceCenter);
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, rotated, [inputSize8, inputSize8]);
const face5 = tf5.div(cut, 255);
tf5.dispose(cut);
tf5.dispose(rotated);
const largeAngle = angle && angle !== 0 && Math.abs(angle) > 0.2;
let rotationMatrix;
let face5;
if (largeAngle) {
const faceCenter = getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
const rotated = tf5.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
rotationMatrix = buildRotationMatrix(-angle, faceCenter);
face5 = cutBoxFromImageAndResize(box4, rotated, [inputSize8, inputSize8]);
tf5.dispose(rotated);
} else {
rotationMatrix = fixedRotationMatrix;
face5 = cutBoxFromImageAndResize(box4, input, [inputSize8, inputSize8]);
}
return [angle, rotationMatrix, face5];
}
@ -5417,7 +5427,8 @@ async function predict5(image25, config3, idx, count2) {
return new Promise(async (resolve) => {
const obj = [];
if (config3.face.emotion?.enabled) {
const resize = tf10.image.resizeBilinear(image25, [model6?.inputs[0].shape ? model6.inputs[0].shape[2] : 0, model6?.inputs[0].shape ? model6.inputs[0].shape[1] : 0], false);
const inputSize8 = model6?.inputs[0].shape ? model6.inputs[0].shape[2] : 0;
const resize = tf10.image.resizeBilinear(image25, [inputSize8, inputSize8], false);
const [red, green, blue] = tf10.split(resize, 3, 3);
tf10.dispose(resize);
const redNorm = tf10.mul(red, rgb[0]);
@ -5590,35 +5601,33 @@ var model8 = null;
var inputSize5 = 0;
var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0;
var detectedFaces = 0;
var enlargeFact = 1.6;
async function predict6(input, config3) {
const skipTime = (config3.face.detector?.skipTime || 0) > now() - lastTime6;
const skipFrame = skipped7 < (config3.face.detector?.skipFrames || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || detectedFaces === 0) {
const newBoxes2 = await getBoxes(input, config3);
if (!config3.skipAllowed || !skipTime || !skipFrame || boxCache.length === 0) {
const possibleBoxes = await getBoxes(input, config3);
lastTime6 = now();
boxCache = [];
for (const possible of newBoxes2.boxes) {
const startPoint = await possible.box.startPoint.data();
const endPoint = await possible.box.endPoint.data();
const landmarks = await possible.landmarks.array();
boxCache.push({ startPoint, endPoint, landmarks, confidence: possible.confidence });
}
newBoxes2.boxes.forEach((prediction) => tf12.dispose([prediction.box.startPoint, prediction.box.endPoint, prediction.landmarks]));
for (let i = 0; i < boxCache.length; i++) {
const scaledBox = scaleBoxCoordinates({ startPoint: boxCache[i].startPoint, endPoint: boxCache[i].endPoint }, newBoxes2.scaleFactor);
const enlargedBox = enlargeBox(scaledBox);
const squarifiedBox = squarifyBox(enlargedBox);
boxCache[i] = { ...squarifiedBox, confidence: boxCache[i].confidence, landmarks: boxCache[i].landmarks };
for (const possible of possibleBoxes.boxes) {
const box4 = {
startPoint: await possible.box.startPoint.data(),
endPoint: await possible.box.endPoint.data(),
landmarks: await possible.landmarks.array(),
confidence: possible.confidence
};
boxCache.push(squarifyBox(enlargeBox(scaleBoxCoordinates(box4, possibleBoxes.scaleFactor), Math.sqrt(enlargeFact))));
}
possibleBoxes.boxes.forEach((prediction) => tf12.dispose([prediction.box.startPoint, prediction.box.endPoint, prediction.landmarks]));
skipped7 = 0;
} else {
skipped7++;
}
const faces = [];
const newBoxes = [];
const newCache = [];
let id = 0;
for (let box4 of boxCache) {
for (let i = 0; i < boxCache.length; i++) {
let box4 = boxCache[i];
let angle = 0;
let rotationMatrix;
const face5 = {
@ -5635,16 +5644,15 @@ async function predict6(input, config3) {
if (config3.face.detector?.rotation && config3.face.mesh?.enabled && env.kernels.includes("rotatewithoffset")) {
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} else {
rotationMatrix = IDENTITY_MATRIX;
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input, config3.face.mesh?.enabled ? [inputSize5, inputSize5] : [size(), size()]);
face5.tensor = tf12.div(cut, 255);
tf12.dispose(cut);
rotationMatrix = fixedRotationMatrix;
face5.tensor = cutBoxFromImageAndResize(box4, input, config3.face.mesh?.enabled ? [inputSize5, inputSize5] : [size(), size()]);
}
face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!config3.face.mesh?.enabled) {
face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input);
face5.score = Math.round(100 * box4.confidence || 0) / 100;
face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
face5.score = face5.boxScore;
face5.mesh = box4.landmarks.map((pt) => [
(box4.startPoint[0] + box4.endPoint[0]) / 2 + (box4.endPoint[0] + box4.startPoint[0]) * pt[0] / size(),
(box4.startPoint[1] + box4.endPoint[1]) / 2 + (box4.endPoint[1] + box4.startPoint[1]) * pt[1] / size()
@ -5657,40 +5665,30 @@ async function predict6(input, config3) {
log("face mesh detection requested, but model is not loaded");
} else {
const [contours, confidence, contourCoords] = model8.execute(face5.tensor);
tf12.dispose(contours);
const faceConfidence = (await confidence.data())[0];
tf12.dispose(confidence);
const faceConfidence = await confidence.data();
face5.faceScore = Math.round(100 * faceConfidence[0]) / 100;
const coordsReshaped = tf12.reshape(contourCoords, [-1, 3]);
let rawCoords = await coordsReshaped.array();
tf12.dispose(contourCoords);
tf12.dispose(coordsReshaped);
if (faceConfidence < (config3.face.detector?.minConfidence || 1)) {
box4.confidence = faceConfidence;
tf12.dispose([contourCoords, coordsReshaped, confidence, contours]);
if (face5.faceScore < (config3.face.detector?.minConfidence || 1)) {
box4.confidence = face5.faceScore;
} else {
if (config3.face.iris?.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence };
for (const key of Object.keys(meshAnnotations))
face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]);
if (config3.face.detector?.rotation && config3.face.mesh.enabled && config3.face.description?.enabled && env.kernels.includes("rotatewithoffset")) {
tf12.dispose(face5.tensor);
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
}
box4 = squarifyBox(enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact));
face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input);
face5.score = Math.round(100 * faceConfidence || 100 * box4.confidence || 0) / 100;
face5.faceScore = Math.round(100 * faceConfidence) / 100;
box4 = { ...squarifyBox(box4), confidence: box4.confidence, faceConfidence };
face5.score = face5.faceScore;
newCache.push(box4);
}
}
faces.push(face5);
newBoxes.push(box4);
}
if (config3.face.mesh?.enabled)
boxCache = newBoxes.filter((a) => a.confidence > (config3.face.detector?.minConfidence || 0));
detectedFaces = faces.length;
boxCache = [...newCache];
return faces;
}
async function load8(config3) {
@ -5738,10 +5736,9 @@ function enhance(input) {
const tensor3 = input.image || input.tensor || input;
if (!(tensor3 instanceof tf13.Tensor))
return null;
const box4 = [[0.05, 0.15, 0.85, 0.85]];
if (!model9?.inputs[0].shape)
return null;
const crop2 = tensor3.shape.length === 3 ? tf13.image.cropAndResize(tf13.expandDims(tensor3, 0), box4, [0], [model9.inputs[0].shape[2], model9.inputs[0].shape[1]]) : tf13.image.cropAndResize(tensor3, box4, [0], [model9.inputs[0].shape[2], model9.inputs[0].shape[1]]);
const crop2 = tf13.image.resizeBilinear(tensor3, [model9.inputs[0].shape[2], model9.inputs[0].shape[1]], false);
const norm = tf13.mul(crop2, 255);
return norm;
});
@ -11670,14 +11667,17 @@ var iris3 = (res) => {
center = true;
gestures.push({ iris: i, gesture: "facing center" });
}
const rightIrisCenterX = Math.abs(res[i].mesh[33][0] - res[i].annotations.rightEyeIris[0][0]) / res[i].box[2];
const leftIrisCenterX = Math.abs(res[i].mesh[263][0] - res[i].annotations.leftEyeIris[0][0]) / res[i].box[2];
const rightIrisCenterX = Math.abs(res[i].mesh[33][0] - res[i].annotations.rightEyeIris[0][0]) / res[i].box[2];
if (leftIrisCenterX > 0.06 || rightIrisCenterX > 0.06)
center = false;
if (leftIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking right" });
if (rightIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking left" });
if (leftIrisCenterX > rightIrisCenterX) {
if (leftIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking right" });
} else {
if (rightIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking left" });
}
const rightIrisCenterY = Math.abs(res[i].mesh[145][1] - res[i].annotations.rightEyeIris[0][1]) / res[i].box[3];
const leftIrisCenterY = Math.abs(res[i].mesh[374][1] - res[i].annotations.leftEyeIris[0][1]) / res[i].box[3];
if (leftIrisCenterY < 0.01 || rightIrisCenterY < 0.01 || leftIrisCenterY > 0.022 || rightIrisCenterY > 0.022)

View File

@ -4662,31 +4662,34 @@ var getRawBox = (box4, input) => box4 ? [
var scaleBoxCoordinates = (box4, factor) => {
const startPoint = [box4.startPoint[0] * factor[0], box4.startPoint[1] * factor[1]];
const endPoint = [box4.endPoint[0] * factor[0], box4.endPoint[1] * factor[1]];
return { startPoint, endPoint };
return { startPoint, endPoint, landmarks: box4.landmarks, confidence: box4.confidence };
};
var cutBoxFromImageAndResize = (box4, image25, cropSize) => {
const h = image25.shape[1];
const w = image25.shape[2];
return tf5.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const crop2 = tf5.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const norm = tf5.div(crop2, 255);
tf5.dispose(crop2);
return norm;
};
var enlargeBox = (box4, factor = 1.5) => {
var enlargeBox = (box4, factor) => {
const center = getBoxCenter(box4);
const size2 = getBoxSize(box4);
const halfSize = [factor * size2[0] / 2, factor * size2[1] / 2];
return { startPoint: [center[0] - halfSize[0], center[1] - halfSize[1]], endPoint: [center[0] + halfSize[0], center[1] + halfSize[1]], landmarks: box4.landmarks };
return { startPoint: [center[0] - halfSize[0], center[1] - halfSize[1]], endPoint: [center[0] + halfSize[0], center[1] + halfSize[1]], landmarks: box4.landmarks, confidence: box4.confidence };
};
var squarifyBox = (box4) => {
const centers = getBoxCenter(box4);
const size2 = getBoxSize(box4);
const halfSize = Math.max(...size2) / 2;
return { startPoint: [Math.round(centers[0] - halfSize), Math.round(centers[1] - halfSize)], endPoint: [Math.round(centers[0] + halfSize), Math.round(centers[1] + halfSize)], landmarks: box4.landmarks };
return { startPoint: [Math.round(centers[0] - halfSize), Math.round(centers[1] - halfSize)], endPoint: [Math.round(centers[0] + halfSize), Math.round(centers[1] + halfSize)], landmarks: box4.landmarks, confidence: box4.confidence };
};
var calculateLandmarksBoundingBox = (landmarks) => {
const xs = landmarks.map((d) => d[0]);
const ys = landmarks.map((d) => d[1]);
return { startPoint: [Math.min(...xs), Math.min(...ys)], endPoint: [Math.max(...xs), Math.max(...ys)], landmarks };
};
var IDENTITY_MATRIX = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];
var fixedRotationMatrix = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];
var normalizeRadians = (angle) => angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));
var computeRotation = (point1, point2) => normalizeRadians(Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]));
var buildTranslationMatrix = (x, y) => [[1, 0, x], [0, 1, y], [0, 0, 1]];
@ -4747,16 +4750,17 @@ function generateAnchors(inputSize8) {
}
return anchors4;
}
function transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize8) {
const boxSize = getBoxSize({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const coordsScaled = rawCoords.map((coord) => [
function transformRawCoords(coordsRaw, box4, angle, rotationMatrix, inputSize8) {
const boxSize = getBoxSize(box4);
const coordsScaled = coordsRaw.map((coord) => [
boxSize[0] / inputSize8 * (coord[0] - inputSize8 / 2),
boxSize[1] / inputSize8 * (coord[1] - inputSize8 / 2),
coord[2] || 0
]);
const coordsRotationMatrix = angle !== 0 ? buildRotationMatrix(angle, [0, 0]) : IDENTITY_MATRIX;
const coordsRotated = angle !== 0 ? coordsScaled.map((coord) => [...rotatePoint(coord, coordsRotationMatrix), coord[2]]) : coordsScaled;
const inverseRotationMatrix = angle !== 0 ? invertTransformMatrix(rotationMatrix) : IDENTITY_MATRIX;
const largeAngle = angle && angle !== 0 && Math.abs(angle) > 0.2;
const coordsRotationMatrix = largeAngle ? buildRotationMatrix(angle, [0, 0]) : fixedRotationMatrix;
const coordsRotated = largeAngle ? coordsScaled.map((coord) => [...rotatePoint(coord, coordsRotationMatrix), coord[2]]) : coordsScaled;
const inverseRotationMatrix = largeAngle ? invertTransformMatrix(rotationMatrix) : fixedRotationMatrix;
const boxCenter = [...getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint }), 1];
return coordsRotated.map((coord) => [
Math.round(coord[0] + dot(boxCenter, inverseRotationMatrix[0])),
@ -4767,14 +4771,20 @@ function transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize8)
function correctFaceRotation(box4, input, inputSize8) {
const symmetryLine = box4.landmarks.length >= meshLandmarks.count ? meshLandmarks.symmetryLine : blazeFaceLandmarks.symmetryLine;
const angle = computeRotation(box4.landmarks[symmetryLine[0]], box4.landmarks[symmetryLine[1]]);
const faceCenter = getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
const rotated = tf5.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
const rotationMatrix = buildRotationMatrix(-angle, faceCenter);
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, rotated, [inputSize8, inputSize8]);
const face5 = tf5.div(cut, 255);
tf5.dispose(cut);
tf5.dispose(rotated);
const largeAngle = angle && angle !== 0 && Math.abs(angle) > 0.2;
let rotationMatrix;
let face5;
if (largeAngle) {
const faceCenter = getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
const rotated = tf5.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
rotationMatrix = buildRotationMatrix(-angle, faceCenter);
face5 = cutBoxFromImageAndResize(box4, rotated, [inputSize8, inputSize8]);
tf5.dispose(rotated);
} else {
rotationMatrix = fixedRotationMatrix;
face5 = cutBoxFromImageAndResize(box4, input, [inputSize8, inputSize8]);
}
return [angle, rotationMatrix, face5];
}
@ -5418,7 +5428,8 @@ async function predict5(image25, config3, idx, count2) {
return new Promise(async (resolve) => {
const obj = [];
if (config3.face.emotion?.enabled) {
const resize = tf10.image.resizeBilinear(image25, [model6?.inputs[0].shape ? model6.inputs[0].shape[2] : 0, model6?.inputs[0].shape ? model6.inputs[0].shape[1] : 0], false);
const inputSize8 = model6?.inputs[0].shape ? model6.inputs[0].shape[2] : 0;
const resize = tf10.image.resizeBilinear(image25, [inputSize8, inputSize8], false);
const [red, green, blue] = tf10.split(resize, 3, 3);
tf10.dispose(resize);
const redNorm = tf10.mul(red, rgb[0]);
@ -5591,35 +5602,33 @@ var model8 = null;
var inputSize5 = 0;
var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0;
var detectedFaces = 0;
var enlargeFact = 1.6;
async function predict6(input, config3) {
const skipTime = (config3.face.detector?.skipTime || 0) > now() - lastTime6;
const skipFrame = skipped7 < (config3.face.detector?.skipFrames || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || detectedFaces === 0) {
const newBoxes2 = await getBoxes(input, config3);
if (!config3.skipAllowed || !skipTime || !skipFrame || boxCache.length === 0) {
const possibleBoxes = await getBoxes(input, config3);
lastTime6 = now();
boxCache = [];
for (const possible of newBoxes2.boxes) {
const startPoint = await possible.box.startPoint.data();
const endPoint = await possible.box.endPoint.data();
const landmarks = await possible.landmarks.array();
boxCache.push({ startPoint, endPoint, landmarks, confidence: possible.confidence });
}
newBoxes2.boxes.forEach((prediction) => tf12.dispose([prediction.box.startPoint, prediction.box.endPoint, prediction.landmarks]));
for (let i = 0; i < boxCache.length; i++) {
const scaledBox = scaleBoxCoordinates({ startPoint: boxCache[i].startPoint, endPoint: boxCache[i].endPoint }, newBoxes2.scaleFactor);
const enlargedBox = enlargeBox(scaledBox);
const squarifiedBox = squarifyBox(enlargedBox);
boxCache[i] = { ...squarifiedBox, confidence: boxCache[i].confidence, landmarks: boxCache[i].landmarks };
for (const possible of possibleBoxes.boxes) {
const box4 = {
startPoint: await possible.box.startPoint.data(),
endPoint: await possible.box.endPoint.data(),
landmarks: await possible.landmarks.array(),
confidence: possible.confidence
};
boxCache.push(squarifyBox(enlargeBox(scaleBoxCoordinates(box4, possibleBoxes.scaleFactor), Math.sqrt(enlargeFact))));
}
possibleBoxes.boxes.forEach((prediction) => tf12.dispose([prediction.box.startPoint, prediction.box.endPoint, prediction.landmarks]));
skipped7 = 0;
} else {
skipped7++;
}
const faces = [];
const newBoxes = [];
const newCache = [];
let id = 0;
for (let box4 of boxCache) {
for (let i = 0; i < boxCache.length; i++) {
let box4 = boxCache[i];
let angle = 0;
let rotationMatrix;
const face5 = {
@ -5636,16 +5645,15 @@ async function predict6(input, config3) {
if (config3.face.detector?.rotation && config3.face.mesh?.enabled && env.kernels.includes("rotatewithoffset")) {
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} else {
rotationMatrix = IDENTITY_MATRIX;
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input, config3.face.mesh?.enabled ? [inputSize5, inputSize5] : [size(), size()]);
face5.tensor = tf12.div(cut, 255);
tf12.dispose(cut);
rotationMatrix = fixedRotationMatrix;
face5.tensor = cutBoxFromImageAndResize(box4, input, config3.face.mesh?.enabled ? [inputSize5, inputSize5] : [size(), size()]);
}
face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!config3.face.mesh?.enabled) {
face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input);
face5.score = Math.round(100 * box4.confidence || 0) / 100;
face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
face5.score = face5.boxScore;
face5.mesh = box4.landmarks.map((pt) => [
(box4.startPoint[0] + box4.endPoint[0]) / 2 + (box4.endPoint[0] + box4.startPoint[0]) * pt[0] / size(),
(box4.startPoint[1] + box4.endPoint[1]) / 2 + (box4.endPoint[1] + box4.startPoint[1]) * pt[1] / size()
@ -5658,40 +5666,30 @@ async function predict6(input, config3) {
log("face mesh detection requested, but model is not loaded");
} else {
const [contours, confidence, contourCoords] = model8.execute(face5.tensor);
tf12.dispose(contours);
const faceConfidence = (await confidence.data())[0];
tf12.dispose(confidence);
const faceConfidence = await confidence.data();
face5.faceScore = Math.round(100 * faceConfidence[0]) / 100;
const coordsReshaped = tf12.reshape(contourCoords, [-1, 3]);
let rawCoords = await coordsReshaped.array();
tf12.dispose(contourCoords);
tf12.dispose(coordsReshaped);
if (faceConfidence < (config3.face.detector?.minConfidence || 1)) {
box4.confidence = faceConfidence;
tf12.dispose([contourCoords, coordsReshaped, confidence, contours]);
if (face5.faceScore < (config3.face.detector?.minConfidence || 1)) {
box4.confidence = face5.faceScore;
} else {
if (config3.face.iris?.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence };
for (const key of Object.keys(meshAnnotations))
face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]);
if (config3.face.detector?.rotation && config3.face.mesh.enabled && config3.face.description?.enabled && env.kernels.includes("rotatewithoffset")) {
tf12.dispose(face5.tensor);
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
}
box4 = squarifyBox(enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact));
face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input);
face5.score = Math.round(100 * faceConfidence || 100 * box4.confidence || 0) / 100;
face5.faceScore = Math.round(100 * faceConfidence) / 100;
box4 = { ...squarifyBox(box4), confidence: box4.confidence, faceConfidence };
face5.score = face5.faceScore;
newCache.push(box4);
}
}
faces.push(face5);
newBoxes.push(box4);
}
if (config3.face.mesh?.enabled)
boxCache = newBoxes.filter((a) => a.confidence > (config3.face.detector?.minConfidence || 0));
detectedFaces = faces.length;
boxCache = [...newCache];
return faces;
}
async function load8(config3) {
@ -5739,10 +5737,9 @@ function enhance(input) {
const tensor3 = input.image || input.tensor || input;
if (!(tensor3 instanceof tf13.Tensor))
return null;
const box4 = [[0.05, 0.15, 0.85, 0.85]];
if (!model9?.inputs[0].shape)
return null;
const crop2 = tensor3.shape.length === 3 ? tf13.image.cropAndResize(tf13.expandDims(tensor3, 0), box4, [0], [model9.inputs[0].shape[2], model9.inputs[0].shape[1]]) : tf13.image.cropAndResize(tensor3, box4, [0], [model9.inputs[0].shape[2], model9.inputs[0].shape[1]]);
const crop2 = tf13.image.resizeBilinear(tensor3, [model9.inputs[0].shape[2], model9.inputs[0].shape[1]], false);
const norm = tf13.mul(crop2, 255);
return norm;
});
@ -11671,14 +11668,17 @@ var iris3 = (res) => {
center = true;
gestures.push({ iris: i, gesture: "facing center" });
}
const rightIrisCenterX = Math.abs(res[i].mesh[33][0] - res[i].annotations.rightEyeIris[0][0]) / res[i].box[2];
const leftIrisCenterX = Math.abs(res[i].mesh[263][0] - res[i].annotations.leftEyeIris[0][0]) / res[i].box[2];
const rightIrisCenterX = Math.abs(res[i].mesh[33][0] - res[i].annotations.rightEyeIris[0][0]) / res[i].box[2];
if (leftIrisCenterX > 0.06 || rightIrisCenterX > 0.06)
center = false;
if (leftIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking right" });
if (rightIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking left" });
if (leftIrisCenterX > rightIrisCenterX) {
if (leftIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking right" });
} else {
if (rightIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking left" });
}
const rightIrisCenterY = Math.abs(res[i].mesh[145][1] - res[i].annotations.rightEyeIris[0][1]) / res[i].box[3];
const leftIrisCenterY = Math.abs(res[i].mesh[374][1] - res[i].annotations.leftEyeIris[0][1]) / res[i].box[3];
if (leftIrisCenterY < 0.01 || rightIrisCenterY < 0.01 || leftIrisCenterY > 0.022 || rightIrisCenterY > 0.022)

138
dist/human.node.js vendored
View File

@ -4661,31 +4661,34 @@ var getRawBox = (box4, input) => box4 ? [
var scaleBoxCoordinates = (box4, factor) => {
const startPoint = [box4.startPoint[0] * factor[0], box4.startPoint[1] * factor[1]];
const endPoint = [box4.endPoint[0] * factor[0], box4.endPoint[1] * factor[1]];
return { startPoint, endPoint };
return { startPoint, endPoint, landmarks: box4.landmarks, confidence: box4.confidence };
};
var cutBoxFromImageAndResize = (box4, image25, cropSize) => {
const h = image25.shape[1];
const w = image25.shape[2];
return tf5.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const crop2 = tf5.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const norm = tf5.div(crop2, 255);
tf5.dispose(crop2);
return norm;
};
var enlargeBox = (box4, factor = 1.5) => {
var enlargeBox = (box4, factor) => {
const center = getBoxCenter(box4);
const size2 = getBoxSize(box4);
const halfSize = [factor * size2[0] / 2, factor * size2[1] / 2];
return { startPoint: [center[0] - halfSize[0], center[1] - halfSize[1]], endPoint: [center[0] + halfSize[0], center[1] + halfSize[1]], landmarks: box4.landmarks };
return { startPoint: [center[0] - halfSize[0], center[1] - halfSize[1]], endPoint: [center[0] + halfSize[0], center[1] + halfSize[1]], landmarks: box4.landmarks, confidence: box4.confidence };
};
var squarifyBox = (box4) => {
const centers = getBoxCenter(box4);
const size2 = getBoxSize(box4);
const halfSize = Math.max(...size2) / 2;
return { startPoint: [Math.round(centers[0] - halfSize), Math.round(centers[1] - halfSize)], endPoint: [Math.round(centers[0] + halfSize), Math.round(centers[1] + halfSize)], landmarks: box4.landmarks };
return { startPoint: [Math.round(centers[0] - halfSize), Math.round(centers[1] - halfSize)], endPoint: [Math.round(centers[0] + halfSize), Math.round(centers[1] + halfSize)], landmarks: box4.landmarks, confidence: box4.confidence };
};
var calculateLandmarksBoundingBox = (landmarks) => {
const xs = landmarks.map((d) => d[0]);
const ys = landmarks.map((d) => d[1]);
return { startPoint: [Math.min(...xs), Math.min(...ys)], endPoint: [Math.max(...xs), Math.max(...ys)], landmarks };
};
var IDENTITY_MATRIX = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];
var fixedRotationMatrix = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];
var normalizeRadians = (angle) => angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));
var computeRotation = (point1, point2) => normalizeRadians(Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]));
var buildTranslationMatrix = (x, y) => [[1, 0, x], [0, 1, y], [0, 0, 1]];
@ -4746,16 +4749,17 @@ function generateAnchors(inputSize8) {
}
return anchors4;
}
function transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize8) {
const boxSize = getBoxSize({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const coordsScaled = rawCoords.map((coord) => [
function transformRawCoords(coordsRaw, box4, angle, rotationMatrix, inputSize8) {
const boxSize = getBoxSize(box4);
const coordsScaled = coordsRaw.map((coord) => [
boxSize[0] / inputSize8 * (coord[0] - inputSize8 / 2),
boxSize[1] / inputSize8 * (coord[1] - inputSize8 / 2),
coord[2] || 0
]);
const coordsRotationMatrix = angle !== 0 ? buildRotationMatrix(angle, [0, 0]) : IDENTITY_MATRIX;
const coordsRotated = angle !== 0 ? coordsScaled.map((coord) => [...rotatePoint(coord, coordsRotationMatrix), coord[2]]) : coordsScaled;
const inverseRotationMatrix = angle !== 0 ? invertTransformMatrix(rotationMatrix) : IDENTITY_MATRIX;
const largeAngle = angle && angle !== 0 && Math.abs(angle) > 0.2;
const coordsRotationMatrix = largeAngle ? buildRotationMatrix(angle, [0, 0]) : fixedRotationMatrix;
const coordsRotated = largeAngle ? coordsScaled.map((coord) => [...rotatePoint(coord, coordsRotationMatrix), coord[2]]) : coordsScaled;
const inverseRotationMatrix = largeAngle ? invertTransformMatrix(rotationMatrix) : fixedRotationMatrix;
const boxCenter = [...getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint }), 1];
return coordsRotated.map((coord) => [
Math.round(coord[0] + dot(boxCenter, inverseRotationMatrix[0])),
@ -4766,14 +4770,20 @@ function transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize8)
function correctFaceRotation(box4, input, inputSize8) {
const symmetryLine = box4.landmarks.length >= meshLandmarks.count ? meshLandmarks.symmetryLine : blazeFaceLandmarks.symmetryLine;
const angle = computeRotation(box4.landmarks[symmetryLine[0]], box4.landmarks[symmetryLine[1]]);
const faceCenter = getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
const rotated = tf5.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
const rotationMatrix = buildRotationMatrix(-angle, faceCenter);
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, rotated, [inputSize8, inputSize8]);
const face5 = tf5.div(cut, 255);
tf5.dispose(cut);
tf5.dispose(rotated);
const largeAngle = angle && angle !== 0 && Math.abs(angle) > 0.2;
let rotationMatrix;
let face5;
if (largeAngle) {
const faceCenter = getBoxCenter({ startPoint: box4.startPoint, endPoint: box4.endPoint });
const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
const rotated = tf5.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
rotationMatrix = buildRotationMatrix(-angle, faceCenter);
face5 = cutBoxFromImageAndResize(box4, rotated, [inputSize8, inputSize8]);
tf5.dispose(rotated);
} else {
rotationMatrix = fixedRotationMatrix;
face5 = cutBoxFromImageAndResize(box4, input, [inputSize8, inputSize8]);
}
return [angle, rotationMatrix, face5];
}
@ -5417,7 +5427,8 @@ async function predict5(image25, config3, idx, count2) {
return new Promise(async (resolve) => {
const obj = [];
if (config3.face.emotion?.enabled) {
const resize = tf10.image.resizeBilinear(image25, [model6?.inputs[0].shape ? model6.inputs[0].shape[2] : 0, model6?.inputs[0].shape ? model6.inputs[0].shape[1] : 0], false);
const inputSize8 = model6?.inputs[0].shape ? model6.inputs[0].shape[2] : 0;
const resize = tf10.image.resizeBilinear(image25, [inputSize8, inputSize8], false);
const [red, green, blue] = tf10.split(resize, 3, 3);
tf10.dispose(resize);
const redNorm = tf10.mul(red, rgb[0]);
@ -5590,35 +5601,33 @@ var model8 = null;
var inputSize5 = 0;
var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0;
var detectedFaces = 0;
var enlargeFact = 1.6;
async function predict6(input, config3) {
const skipTime = (config3.face.detector?.skipTime || 0) > now() - lastTime6;
const skipFrame = skipped7 < (config3.face.detector?.skipFrames || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || detectedFaces === 0) {
const newBoxes2 = await getBoxes(input, config3);
if (!config3.skipAllowed || !skipTime || !skipFrame || boxCache.length === 0) {
const possibleBoxes = await getBoxes(input, config3);
lastTime6 = now();
boxCache = [];
for (const possible of newBoxes2.boxes) {
const startPoint = await possible.box.startPoint.data();
const endPoint = await possible.box.endPoint.data();
const landmarks = await possible.landmarks.array();
boxCache.push({ startPoint, endPoint, landmarks, confidence: possible.confidence });
}
newBoxes2.boxes.forEach((prediction) => tf12.dispose([prediction.box.startPoint, prediction.box.endPoint, prediction.landmarks]));
for (let i = 0; i < boxCache.length; i++) {
const scaledBox = scaleBoxCoordinates({ startPoint: boxCache[i].startPoint, endPoint: boxCache[i].endPoint }, newBoxes2.scaleFactor);
const enlargedBox = enlargeBox(scaledBox);
const squarifiedBox = squarifyBox(enlargedBox);
boxCache[i] = { ...squarifiedBox, confidence: boxCache[i].confidence, landmarks: boxCache[i].landmarks };
for (const possible of possibleBoxes.boxes) {
const box4 = {
startPoint: await possible.box.startPoint.data(),
endPoint: await possible.box.endPoint.data(),
landmarks: await possible.landmarks.array(),
confidence: possible.confidence
};
boxCache.push(squarifyBox(enlargeBox(scaleBoxCoordinates(box4, possibleBoxes.scaleFactor), Math.sqrt(enlargeFact))));
}
possibleBoxes.boxes.forEach((prediction) => tf12.dispose([prediction.box.startPoint, prediction.box.endPoint, prediction.landmarks]));
skipped7 = 0;
} else {
skipped7++;
}
const faces = [];
const newBoxes = [];
const newCache = [];
let id = 0;
for (let box4 of boxCache) {
for (let i = 0; i < boxCache.length; i++) {
let box4 = boxCache[i];
let angle = 0;
let rotationMatrix;
const face5 = {
@ -5635,16 +5644,15 @@ async function predict6(input, config3) {
if (config3.face.detector?.rotation && config3.face.mesh?.enabled && env.kernels.includes("rotatewithoffset")) {
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} else {
rotationMatrix = IDENTITY_MATRIX;
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input, config3.face.mesh?.enabled ? [inputSize5, inputSize5] : [size(), size()]);
face5.tensor = tf12.div(cut, 255);
tf12.dispose(cut);
rotationMatrix = fixedRotationMatrix;
face5.tensor = cutBoxFromImageAndResize(box4, input, config3.face.mesh?.enabled ? [inputSize5, inputSize5] : [size(), size()]);
}
face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!config3.face.mesh?.enabled) {
face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input);
face5.score = Math.round(100 * box4.confidence || 0) / 100;
face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
face5.score = face5.boxScore;
face5.mesh = box4.landmarks.map((pt) => [
(box4.startPoint[0] + box4.endPoint[0]) / 2 + (box4.endPoint[0] + box4.startPoint[0]) * pt[0] / size(),
(box4.startPoint[1] + box4.endPoint[1]) / 2 + (box4.endPoint[1] + box4.startPoint[1]) * pt[1] / size()
@ -5657,40 +5665,30 @@ async function predict6(input, config3) {
log("face mesh detection requested, but model is not loaded");
} else {
const [contours, confidence, contourCoords] = model8.execute(face5.tensor);
tf12.dispose(contours);
const faceConfidence = (await confidence.data())[0];
tf12.dispose(confidence);
const faceConfidence = await confidence.data();
face5.faceScore = Math.round(100 * faceConfidence[0]) / 100;
const coordsReshaped = tf12.reshape(contourCoords, [-1, 3]);
let rawCoords = await coordsReshaped.array();
tf12.dispose(contourCoords);
tf12.dispose(coordsReshaped);
if (faceConfidence < (config3.face.detector?.minConfidence || 1)) {
box4.confidence = faceConfidence;
tf12.dispose([contourCoords, coordsReshaped, confidence, contours]);
if (face5.faceScore < (config3.face.detector?.minConfidence || 1)) {
box4.confidence = face5.faceScore;
} else {
if (config3.face.iris?.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence };
for (const key of Object.keys(meshAnnotations))
face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]);
if (config3.face.detector?.rotation && config3.face.mesh.enabled && config3.face.description?.enabled && env.kernels.includes("rotatewithoffset")) {
tf12.dispose(face5.tensor);
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
}
box4 = squarifyBox(enlargeBox(calculateLandmarksBoundingBox(face5.mesh), enlargeFact));
face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input);
face5.score = Math.round(100 * faceConfidence || 100 * box4.confidence || 0) / 100;
face5.faceScore = Math.round(100 * faceConfidence) / 100;
box4 = { ...squarifyBox(box4), confidence: box4.confidence, faceConfidence };
face5.score = face5.faceScore;
newCache.push(box4);
}
}
faces.push(face5);
newBoxes.push(box4);
}
if (config3.face.mesh?.enabled)
boxCache = newBoxes.filter((a) => a.confidence > (config3.face.detector?.minConfidence || 0));
detectedFaces = faces.length;
boxCache = [...newCache];
return faces;
}
async function load8(config3) {
@ -5738,10 +5736,9 @@ function enhance(input) {
const tensor3 = input.image || input.tensor || input;
if (!(tensor3 instanceof tf13.Tensor))
return null;
const box4 = [[0.05, 0.15, 0.85, 0.85]];
if (!model9?.inputs[0].shape)
return null;
const crop2 = tensor3.shape.length === 3 ? tf13.image.cropAndResize(tf13.expandDims(tensor3, 0), box4, [0], [model9.inputs[0].shape[2], model9.inputs[0].shape[1]]) : tf13.image.cropAndResize(tensor3, box4, [0], [model9.inputs[0].shape[2], model9.inputs[0].shape[1]]);
const crop2 = tf13.image.resizeBilinear(tensor3, [model9.inputs[0].shape[2], model9.inputs[0].shape[1]], false);
const norm = tf13.mul(crop2, 255);
return norm;
});
@ -11670,14 +11667,17 @@ var iris3 = (res) => {
center = true;
gestures.push({ iris: i, gesture: "facing center" });
}
const rightIrisCenterX = Math.abs(res[i].mesh[33][0] - res[i].annotations.rightEyeIris[0][0]) / res[i].box[2];
const leftIrisCenterX = Math.abs(res[i].mesh[263][0] - res[i].annotations.leftEyeIris[0][0]) / res[i].box[2];
const rightIrisCenterX = Math.abs(res[i].mesh[33][0] - res[i].annotations.rightEyeIris[0][0]) / res[i].box[2];
if (leftIrisCenterX > 0.06 || rightIrisCenterX > 0.06)
center = false;
if (leftIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking right" });
if (rightIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking left" });
if (leftIrisCenterX > rightIrisCenterX) {
if (leftIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking right" });
} else {
if (rightIrisCenterX > 0.05)
gestures.push({ iris: i, gesture: "looking left" });
}
const rightIrisCenterY = Math.abs(res[i].mesh[145][1] - res[i].annotations.rightEyeIris[0][1]) / res[i].box[3];
const leftIrisCenterY = Math.abs(res[i].mesh[374][1] - res[i].annotations.leftEyeIris[0][1]) / res[i].box[3];
if (leftIrisCenterY < 0.01 || rightIrisCenterY < 0.01 || leftIrisCenterY > 0.022 || rightIrisCenterY > 0.022)

View File

@ -18,48 +18,45 @@ import type { FaceResult, Point } from '../result';
import type { Config } from '../config';
import { env } from '../util/env';
type BoxCache = { startPoint: Point, endPoint: Point, landmarks: Array<Point>, confidence: number, faceConfidence?: number | undefined };
type BoxCache = { startPoint: Point, endPoint: Point, landmarks: Array<Point>, confidence: number };
let boxCache: Array<BoxCache> = [];
let model: GraphModel | null = null;
let inputSize = 0;
let skipped = Number.MAX_SAFE_INTEGER;
let lastTime = 0;
let detectedFaces = 0;
const enlargeFact = 1.6;
export async function predict(input: Tensor, config: Config): Promise<FaceResult[]> {
// reset cached boxes
const skipTime = (config.face.detector?.skipTime || 0) > (now() - lastTime);
const skipFrame = skipped < (config.face.detector?.skipFrames || 0);
if (!config.skipAllowed || !skipTime || !skipFrame || detectedFaces === 0) {
const newBoxes = await blazeface.getBoxes(input, config); // get results from blazeface detector
if (!config.skipAllowed || !skipTime || !skipFrame || boxCache.length === 0) {
const possibleBoxes = await blazeface.getBoxes(input, config); // get results from blazeface detector
lastTime = now();
boxCache = []; // empty cache
for (const possible of newBoxes.boxes) { // extract data from detector
const startPoint = await possible.box.startPoint.data() as unknown as Point;
const endPoint = await possible.box.endPoint.data() as unknown as Point;
const landmarks = await possible.landmarks.array() as Array<Point>;
boxCache.push({ startPoint, endPoint, landmarks, confidence: possible.confidence });
}
newBoxes.boxes.forEach((prediction) => tf.dispose([prediction.box.startPoint, prediction.box.endPoint, prediction.landmarks]));
for (let i = 0; i < boxCache.length; i++) { // enlarge and squarify detected boxes
const scaledBox = util.scaleBoxCoordinates({ startPoint: boxCache[i].startPoint, endPoint: boxCache[i].endPoint }, newBoxes.scaleFactor);
const enlargedBox = util.enlargeBox(scaledBox);
const squarifiedBox = util.squarifyBox(enlargedBox);
boxCache[i] = { ...squarifiedBox, confidence: boxCache[i].confidence, landmarks: boxCache[i].landmarks };
for (const possible of possibleBoxes.boxes) { // extract data from detector
const box: BoxCache = {
startPoint: await possible.box.startPoint.data() as unknown as Point,
endPoint: await possible.box.endPoint.data() as unknown as Point,
landmarks: await possible.landmarks.array() as Array<Point>,
confidence: possible.confidence,
};
boxCache.push(util.squarifyBox(util.enlargeBox(util.scaleBoxCoordinates(box, possibleBoxes.scaleFactor), Math.sqrt(enlargeFact))));
}
possibleBoxes.boxes.forEach((prediction) => tf.dispose([prediction.box.startPoint, prediction.box.endPoint, prediction.landmarks]));
skipped = 0;
} else {
skipped++;
}
const faces: Array<FaceResult> = [];
const newBoxes: Array<BoxCache> = [];
const newCache: Array<BoxCache> = [];
let id = 0;
for (let box of boxCache) {
for (let i = 0; i < boxCache.length; i++) {
let box = boxCache[i];
let angle = 0;
let rotationMatrix;
const face: FaceResult = {
const face: FaceResult = { // init face result
id: id++,
mesh: [],
meshRaw: [],
@ -74,16 +71,15 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult
if (config.face.detector?.rotation && config.face.mesh?.enabled && env.kernels.includes('rotatewithoffset')) {
[angle, rotationMatrix, face.tensor] = util.correctFaceRotation(box, input, inputSize);
} else {
rotationMatrix = util.IDENTITY_MATRIX;
const cut = util.cutBoxFromImageAndResize({ startPoint: box.startPoint, endPoint: box.endPoint }, input, config.face.mesh?.enabled ? [inputSize, inputSize] : [blazeface.size(), blazeface.size()]);
face.tensor = tf.div(cut, 255);
tf.dispose(cut);
rotationMatrix = util.fixedRotationMatrix;
face.tensor = util.cutBoxFromImageAndResize(box, input, config.face.mesh?.enabled ? [inputSize, inputSize] : [blazeface.size(), blazeface.size()]);
}
face.boxScore = Math.round(100 * box.confidence) / 100;
if (!config.face.mesh?.enabled) { // mesh not enabled, return resuts from detector only
face.box = util.getClampedBox(box, input);
face.boxRaw = util.getRawBox(box, input);
face.score = Math.round(100 * box.confidence || 0) / 100;
face.boxScore = Math.round(100 * box.confidence || 0) / 100;
face.score = face.boxScore;
face.mesh = box.landmarks.map((pt) => [
((box.startPoint[0] + box.endPoint[0])) / 2 + ((box.endPoint[0] + box.startPoint[0]) * pt[0] / blazeface.size()),
((box.startPoint[1] + box.endPoint[1])) / 2 + ((box.endPoint[1] + box.startPoint[1]) * pt[1] / blazeface.size()),
@ -94,37 +90,36 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult
if (config.debug) log('face mesh detection requested, but model is not loaded');
} else { // mesh enabled
const [contours, confidence, contourCoords] = model.execute(face.tensor as Tensor) as Array<Tensor>; // first returned tensor represents facial contours which are already included in the coordinates.
tf.dispose(contours);
const faceConfidence = (await confidence.data())[0] as number;
tf.dispose(confidence);
const faceConfidence = await confidence.data();
face.faceScore = Math.round(100 * faceConfidence[0]) / 100;
const coordsReshaped = tf.reshape(contourCoords, [-1, 3]);
let rawCoords = await coordsReshaped.array();
tf.dispose(contourCoords);
tf.dispose(coordsReshaped);
if (faceConfidence < (config.face.detector?.minConfidence || 1)) {
box.confidence = faceConfidence; // reset confidence of cached box
tf.dispose([contourCoords, coordsReshaped, confidence, contours]);
if (face.faceScore < (config.face.detector?.minConfidence || 1)) { // low confidence in detected mesh
box.confidence = face.faceScore; // reset confidence of cached box
} else {
if (config.face.iris?.enabled) rawCoords = await iris.augmentIris(rawCoords, face.tensor, config, inputSize); // augment results with iris
face.mesh = util.transformRawCoords(rawCoords, box, angle, rotationMatrix, inputSize); // get processed mesh
face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize]);
box = { ...util.enlargeBox(util.calculateLandmarksBoundingBox(face.mesh), 1.5), confidence: box.confidence }; // redefine box with mesh calculated one
for (const key of Object.keys(coords.meshAnnotations)) face.annotations[key] = coords.meshAnnotations[key].map((index) => face.mesh[index]); // add annotations
if (config.face.detector?.rotation && config.face.mesh.enabled && config.face.description?.enabled && env.kernels.includes('rotatewithoffset')) { // do rotation one more time with mesh keypoints if we want to return perfect image
tf.dispose(face.tensor); // dispose so we can overwrite original face
[angle, rotationMatrix, face.tensor] = util.correctFaceRotation(box, input, inputSize);
}
box = util.squarifyBox(util.enlargeBox(util.calculateLandmarksBoundingBox(face.mesh), enlargeFact)); // redefine box with mesh calculated one
face.box = util.getClampedBox(box, input); // update detected box with box around the face mesh
face.boxRaw = util.getRawBox(box, input);
face.score = Math.round(100 * faceConfidence || 100 * box.confidence || 0) / 100;
face.faceScore = Math.round(100 * faceConfidence) / 100;
box = { ...util.squarifyBox(box), confidence: box.confidence, faceConfidence }; // updated stored cache values
face.score = face.faceScore;
newCache.push(box);
// other modules prefer wider crop for a face so we dispose it and do it again
/*
tf.dispose(face.tensor);
face.tensor = config.face.detector?.rotation && config.face.mesh?.enabled && env.kernels.includes('rotatewithoffset')
? face.tensor = util.correctFaceRotation(util.enlargeBox(box, Math.sqrt(enlargeFact)), input, inputSize)[2]
: face.tensor = util.cutBoxFromImageAndResize(util.enlargeBox(box, Math.sqrt(enlargeFact)), input, [inputSize, inputSize]);
*/
}
}
faces.push(face);
newBoxes.push(box);
}
if (config.face.mesh?.enabled) boxCache = newBoxes.filter((a) => a.confidence > (config.face.detector?.minConfidence || 0)); // remove cache entries for detected boxes on low confidence
detectedFaces = faces.length;
boxCache = [...newCache]; // reset cache
return faces;
}

View File

@ -32,36 +32,39 @@ export const getRawBox = (box, input): Box => (box ? [
export const scaleBoxCoordinates = (box, factor) => {
const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];
const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];
return { startPoint, endPoint };
return { startPoint, endPoint, landmarks: box.landmarks, confidence: box.confidence };
};
export const cutBoxFromImageAndResize = (box, image, cropSize) => {
const h = image.shape[1];
const w = image.shape[2];
return tf.image.cropAndResize(image, [[box.startPoint[1] / h, box.startPoint[0] / w, box.endPoint[1] / h, box.endPoint[0] / w]], [0], cropSize);
const crop = tf.image.cropAndResize(image, [[box.startPoint[1] / h, box.startPoint[0] / w, box.endPoint[1] / h, box.endPoint[0] / w]], [0], cropSize);
const norm = tf.div(crop, 255);
tf.dispose(crop);
return norm;
};
export const enlargeBox = (box, factor = 1.5) => {
export const enlargeBox = (box, factor) => {
const center = getBoxCenter(box);
const size = getBoxSize(box);
const halfSize: [number, number] = [factor * size[0] / 2, factor * size[1] / 2];
return { startPoint: [center[0] - halfSize[0], center[1] - halfSize[1]] as Point, endPoint: [center[0] + halfSize[0], center[1] + halfSize[1]] as Point, landmarks: box.landmarks };
return { startPoint: [center[0] - halfSize[0], center[1] - halfSize[1]] as Point, endPoint: [center[0] + halfSize[0], center[1] + halfSize[1]] as Point, landmarks: box.landmarks, confidence: box.confidence };
};
export const squarifyBox = (box) => {
const centers = getBoxCenter(box);
const size = getBoxSize(box);
const halfSize = Math.max(...size) / 2;
return { startPoint: [Math.round(centers[0] - halfSize), Math.round(centers[1] - halfSize)] as Point, endPoint: [Math.round(centers[0] + halfSize), Math.round(centers[1] + halfSize)] as Point, landmarks: box.landmarks };
return { startPoint: [Math.round(centers[0] - halfSize), Math.round(centers[1] - halfSize)] as Point, endPoint: [Math.round(centers[0] + halfSize), Math.round(centers[1] + halfSize)] as Point, landmarks: box.landmarks, confidence: box.confidence };
};
export const calculateLandmarksBoundingBox = (landmarks) => {
const xs = landmarks.map((d) => d[0]);
const ys = landmarks.map((d) => d[1]);
return { startPoint: [Math.min(...xs), Math.min(...ys)], endPoint: [Math.max(...xs), Math.max(...ys)], landmarks };
return { startPoint: [Math.min(...xs), Math.min(...ys)] as Point, endPoint: [Math.max(...xs), Math.max(...ys)] as Point, landmarks };
};
export const IDENTITY_MATRIX = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];
export const fixedRotationMatrix = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];
export const normalizeRadians = (angle) => angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));
@ -71,7 +74,7 @@ export const radToDegrees = (rad) => rad * 180 / Math.PI;
export const buildTranslationMatrix = (x, y) => [[1, 0, x], [0, 1, y], [0, 0, 1]];
export const dot = (v1, v2) => {
export const dot = (v1: number[], v2: number[]) => {
let product = 0;
for (let i = 0; i < v1.length; i++) product += v1[i] * v2[i];
return product;
@ -133,16 +136,17 @@ export function generateAnchors(inputSize) {
return anchors;
}
export function transformRawCoords(rawCoords, box, angle, rotationMatrix, inputSize) {
const boxSize = getBoxSize({ startPoint: box.startPoint, endPoint: box.endPoint });
const coordsScaled = rawCoords.map((coord) => ([
export function transformRawCoords(coordsRaw, box, angle, rotationMatrix, inputSize) {
const boxSize = getBoxSize(box);
const coordsScaled = coordsRaw.map((coord) => ([ // scaled around zero-point
boxSize[0] / inputSize * (coord[0] - inputSize / 2),
boxSize[1] / inputSize * (coord[1] - inputSize / 2),
coord[2] || 0,
]));
const coordsRotationMatrix = (angle !== 0) ? buildRotationMatrix(angle, [0, 0]) : IDENTITY_MATRIX;
const coordsRotated = (angle !== 0) ? coordsScaled.map((coord) => ([...rotatePoint(coord, coordsRotationMatrix), coord[2]])) : coordsScaled;
const inverseRotationMatrix = (angle !== 0) ? invertTransformMatrix(rotationMatrix) : IDENTITY_MATRIX;
const largeAngle = angle && (angle !== 0) && (Math.abs(angle) > 0.2);
const coordsRotationMatrix = largeAngle ? buildRotationMatrix(angle, [0, 0]) : fixedRotationMatrix;
const coordsRotated = largeAngle ? coordsScaled.map((coord) => ([...rotatePoint(coord, coordsRotationMatrix), coord[2]])) : coordsScaled;
const inverseRotationMatrix = largeAngle ? invertTransformMatrix(rotationMatrix) : fixedRotationMatrix;
const boxCenter = [...getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint }), 1];
return coordsRotated.map((coord) => ([
Math.round(coord[0] + dot(boxCenter, inverseRotationMatrix[0])),
@ -154,13 +158,19 @@ export function transformRawCoords(rawCoords, box, angle, rotationMatrix, inputS
export function correctFaceRotation(box, input, inputSize) {
const symmetryLine = (box.landmarks.length >= coords.meshLandmarks.count) ? coords.meshLandmarks.symmetryLine : coords.blazeFaceLandmarks.symmetryLine;
const angle: number = computeRotation(box.landmarks[symmetryLine[0]], box.landmarks[symmetryLine[1]]);
const faceCenter: Point = getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint });
const faceCenterNormalized: Point = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
const rotated = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized); // rotateWithOffset is not defined for tfjs-node
const rotationMatrix = buildRotationMatrix(-angle, faceCenter);
const cut = cutBoxFromImageAndResize({ startPoint: box.startPoint, endPoint: box.endPoint }, rotated, [inputSize, inputSize]);
const face = tf.div(cut, 255);
tf.dispose(cut);
tf.dispose(rotated);
const largeAngle = angle && (angle !== 0) && (Math.abs(angle) > 0.2);
let rotationMatrix;
let face;
if (largeAngle) {
const faceCenter: Point = getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint });
const faceCenterNormalized: Point = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
const rotated = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized); // rotateWithOffset is not defined for tfjs-node
rotationMatrix = buildRotationMatrix(-angle, faceCenter);
face = cutBoxFromImageAndResize(box, rotated, [inputSize, inputSize]);
tf.dispose(rotated);
} else {
rotationMatrix = fixedRotationMatrix;
face = cutBoxFromImageAndResize(box, input, [inputSize, inputSize]);
}
return [angle, rotationMatrix, face];
}

View File

@ -43,13 +43,14 @@ export function enhance(input): Tensor {
const tensor = input.image || input.tensor || input;
if (!(tensor instanceof tf.Tensor)) return null;
// do a tight crop of image and resize it to fit the model
const box = [[0.05, 0.15, 0.85, 0.85]]; // empirical values for top, left, bottom, right
// const box = [[0.0, 0.0, 1.0, 1.0]]; // basically no crop for test
if (!model?.inputs[0].shape) return null; // model has no shape so no point continuing
const crop = tf.image.resizeBilinear(tensor, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
/*
const box = [[0.05, 0.15, 0.85, 0.85]]; // empirical values for top, left, bottom, right
const crop = (tensor.shape.length === 3)
? tf.image.cropAndResize(tf.expandDims(tensor, 0), box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]) // add batch dimension if missing
: tf.image.cropAndResize(tensor, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
*/
/*
// just resize to fit the embedding model instead of cropping
const crop = tf.image.resizeBilinear(tensor, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);

View File

@ -43,7 +43,11 @@ export async function predict(image: Tensor, config: Config, idx, count) {
return new Promise(async (resolve) => {
const obj: Array<{ score: number, emotion: string }> = [];
if (config.face.emotion?.enabled) {
const resize = tf.image.resizeBilinear(image, [model?.inputs[0].shape ? model.inputs[0].shape[2] : 0, model?.inputs[0].shape ? model.inputs[0].shape[1] : 0], false);
const inputSize = model?.inputs[0].shape ? model.inputs[0].shape[2] : 0;
const resize = tf.image.resizeBilinear(image, [inputSize, inputSize], false);
// const box = [[0.15, 0.15, 0.85, 0.85]]; // empirical values for top, left, bottom, right
// const resize = tf.image.cropAndResize(image, box, [0], [inputSize, inputSize]);
const [red, green, blue] = tf.split(resize, 3, 3);
tf.dispose(resize);
// weighted rgb to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html

View File

@ -100,11 +100,14 @@ export const iris = (res): GestureResult[] => {
gestures.push({ iris: i, gesture: 'facing center' });
}
const rightIrisCenterX = Math.abs(res[i].mesh[33][0] - res[i].annotations.rightEyeIris[0][0]) / res[i].box[2];
const leftIrisCenterX = Math.abs(res[i].mesh[263][0] - res[i].annotations.leftEyeIris[0][0]) / res[i].box[2];
const rightIrisCenterX = Math.abs(res[i].mesh[33][0] - res[i].annotations.rightEyeIris[0][0]) / res[i].box[2];
if (leftIrisCenterX > 0.06 || rightIrisCenterX > 0.06) center = false;
if (leftIrisCenterX > 0.05) gestures.push({ iris: i, gesture: 'looking right' });
if (rightIrisCenterX > 0.05) gestures.push({ iris: i, gesture: 'looking left' });
if (leftIrisCenterX > rightIrisCenterX) { // check eye with bigger offset
if (leftIrisCenterX > 0.05) gestures.push({ iris: i, gesture: 'looking right' });
} else {
if (rightIrisCenterX > 0.05) gestures.push({ iris: i, gesture: 'looking left' });
}
const rightIrisCenterY = Math.abs(res[i].mesh[145][1] - res[i].annotations.rightEyeIris[0][1]) / res[i].box[3];
const leftIrisCenterY = Math.abs(res[i].mesh[374][1] - res[i].annotations.leftEyeIris[0][1]) / res[i].box[3];

View File

@ -1,348 +1,25 @@
2021-11-02 11:02:12 INFO:  @vladmandic/human version 2.5.0
2021-11-02 11:02:12 INFO:  User: vlado Platform: linux Arch: x64 Node: v17.0.1
2021-11-02 11:02:12 INFO:  Application: {"name":"@vladmandic/human","version":"2.5.0"}
2021-11-02 11:02:12 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
2021-11-02 11:02:12 INFO:  Toolchain: {"build":"0.6.3","esbuild":"0.13.12","typescript":"4.4.4","typedoc":"0.22.7","eslint":"8.1.0"}
2021-11-02 11:02:12 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
2021-11-02 11:02:12 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
2021-11-02 11:02:12 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
2021-11-02 11:02:12 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":55,"inputBytes":516386,"outputBytes":432583}
2021-11-02 11:02:12 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
2021-11-02 11:02:12 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":55,"inputBytes":516394,"outputBytes":432587}
2021-11-02 11:02:12 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
2021-11-02 11:02:12 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":55,"inputBytes":516461,"outputBytes":432659}
2021-11-02 11:02:12 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
2021-11-02 11:02:12 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2323,"outputBytes":973}
2021-11-02 11:02:12 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":55,"inputBytes":516084,"outputBytes":434514}
2021-11-02 11:02:12 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2564340,"outputBytes":2499559}
2021-11-02 11:02:13 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":55,"inputBytes":3014670,"outputBytes":1607745}
2021-11-02 11:02:13 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":55,"inputBytes":3014670,"outputBytes":2936138}
2021-11-02 11:02:30 STATE: Typings: {"input":"src/human.ts","output":"types","files":48}
2021-11-02 11:02:36 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":48,"generated":true}
2021-11-02 11:02:36 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":3662,"outputBytes":3471}
2021-11-02 11:03:06 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":92,"errors":0,"warnings":0}
2021-11-02 11:03:07 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
2021-11-02 11:03:07 INFO:  Done...
2021-11-02 11:08:10 INFO:  @vladmandic/human version 2.5.0
2021-11-02 11:08:10 INFO:  User: vlado Platform: linux Arch: x64 Node: v17.0.1
2021-11-02 11:08:10 INFO:  Application: {"name":"@vladmandic/human","version":"2.5.0"}
2021-11-02 11:08:10 INFO:  Environment: {"profile":"development","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
2021-11-02 11:08:10 INFO:  Toolchain: {"build":"0.6.3","esbuild":"0.13.12","typescript":"4.4.4","typedoc":"0.22.7","eslint":"8.1.0"}
2021-11-02 11:08:10 INFO:  Build: {"profile":"development","steps":["serve","watch","compile"]}
2021-11-02 11:08:10 STATE: WebServer: {"ssl":false,"port":10030,"root":"."}
2021-11-02 11:08:10 STATE: WebServer: {"ssl":true,"port":10031,"root":".","sslKey":"node_modules/@vladmandic/build/cert/https.key","sslCrt":"node_modules/@vladmandic/build/cert/https.crt"}
2021-11-02 11:08:10 STATE: Watch: {"locations":["src/**","README.md","src/**/*","tfjs/**/*","demo/**/*.ts"]}
2021-11-02 11:08:10 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
2021-11-02 11:08:10 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":55,"inputBytes":516467,"outputBytes":432664}
2021-11-02 11:08:10 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
2021-11-02 11:08:10 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":55,"inputBytes":516475,"outputBytes":432668}
2021-11-02 11:08:10 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
2021-11-02 11:08:10 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":55,"inputBytes":516542,"outputBytes":432740}
2021-11-02 11:08:10 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
2021-11-02 11:08:10 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2323,"outputBytes":973}
2021-11-02 11:08:10 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":55,"inputBytes":516165,"outputBytes":434595}
2021-11-02 11:08:10 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2564340,"outputBytes":2499559}
2021-11-02 11:08:11 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":55,"inputBytes":3014751,"outputBytes":1607797}
2021-11-02 11:08:11 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":55,"inputBytes":3014751,"outputBytes":2936219}
2021-11-02 11:08:11 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":3662,"outputBytes":3471}
2021-11-02 11:08:11 INFO:  Listening...
2021-11-02 11:08:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/html","size":7842,"url":"/?backend=wasm&warmup=true","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/css","size":107884,"url":"/icons.css","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":48222,"url":"/index.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":2936219,"url":"/dist/human.esm.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":14514,"url":"/helpers/menu.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":10901,"url":"/helpers/gl-bench.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":3406,"url":"/helpers/webrtc.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":5785,"url":"/helpers/jsonview.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"font/woff2","size":181500,"url":"/assets/lato-light.woff2","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":4770714,"url":"/dist/human.esm.js.map","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":4115,"url":"/index-pwa.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:48 INFO:  Watch: {"event":"modify","input":"src/gear/emotion.ts"}
2021-11-02 11:08:48 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
2021-11-02 11:08:48 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":55,"inputBytes":516467,"outputBytes":432664}
2021-11-02 11:08:48 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
2021-11-02 11:08:48 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":55,"inputBytes":516475,"outputBytes":432668}
2021-11-02 11:08:48 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
2021-11-02 11:08:48 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":55,"inputBytes":516542,"outputBytes":432740}
2021-11-02 11:08:48 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
2021-11-02 11:08:48 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2323,"outputBytes":973}
2021-11-02 11:08:48 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":55,"inputBytes":516165,"outputBytes":434595}
2021-11-02 11:08:48 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2564340,"outputBytes":2499559}
2021-11-02 11:08:49 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":55,"inputBytes":3014751,"outputBytes":1607797}
2021-11-02 11:08:49 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":55,"inputBytes":3014751,"outputBytes":2936219}
2021-11-02 11:08:49 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":3662,"outputBytes":3471}
2021-11-02 11:08:53 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/html","size":7842,"url":"/?backend=wasm&warmup=true","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:53 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/css","size":107884,"url":"/icons.css","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:53 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":48222,"url":"/index.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:53 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":2936219,"url":"/dist/human.esm.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:53 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":14514,"url":"/helpers/menu.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:53 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":10901,"url":"/helpers/gl-bench.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:53 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":3406,"url":"/helpers/webrtc.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:53 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":5785,"url":"/helpers/jsonview.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:53 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"font/woff2","size":181500,"url":"/assets/lato-light.woff2","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:53 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":4770714,"url":"/dist/human.esm.js.map","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:54 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"image/x-icon","size":261950,"url":"/favicon.ico","remote":"::ffff:192.168.0.200"}
2021-11-02 11:08:54 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":4115,"url":"/index-pwa.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:08 INFO:  Watch: {"event":"modify","input":"src/gear/emotion.ts"}
2021-11-02 11:09:08 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
2021-11-02 11:09:08 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":55,"inputBytes":516493,"outputBytes":432690}
2021-11-02 11:09:08 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
2021-11-02 11:09:09 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":55,"inputBytes":516501,"outputBytes":432694}
2021-11-02 11:09:09 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
2021-11-02 11:09:09 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":55,"inputBytes":516568,"outputBytes":432766}
2021-11-02 11:09:09 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
2021-11-02 11:09:09 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2323,"outputBytes":973}
2021-11-02 11:09:09 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":55,"inputBytes":516191,"outputBytes":434621}
2021-11-02 11:09:09 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2564340,"outputBytes":2499559}
2021-11-02 11:09:09 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":55,"inputBytes":3014777,"outputBytes":1607820}
2021-11-02 11:09:10 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":55,"inputBytes":3014777,"outputBytes":2936245}
2021-11-02 11:09:10 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":3662,"outputBytes":3471}
2021-11-02 11:09:12 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/html","size":7842,"url":"/?backend=wasm&warmup=true","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:12 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/css","size":107884,"url":"/icons.css","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:12 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":48222,"url":"/index.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:12 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":2936245,"url":"/dist/human.esm.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:12 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":14514,"url":"/helpers/menu.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:12 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":10901,"url":"/helpers/gl-bench.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:12 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":3406,"url":"/helpers/webrtc.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:12 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":5785,"url":"/helpers/jsonview.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:12 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"font/woff2","size":181500,"url":"/assets/lato-light.woff2","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:13 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":4770756,"url":"/dist/human.esm.js.map","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:13 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"image/x-icon","size":261950,"url":"/favicon.ico","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:13 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":4115,"url":"/index-pwa.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/html","size":7842,"url":"/?backend=wasm&warmup=true&worker=false","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/css","size":107884,"url":"/icons.css","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":48222,"url":"/index.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"font/woff2","size":181500,"url":"/assets/lato-light.woff2","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":2936245,"url":"/dist/human.esm.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":14514,"url":"/helpers/menu.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":10901,"url":"/helpers/gl-bench.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":3406,"url":"/helpers/webrtc.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":5785,"url":"/helpers/jsonview.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":4770756,"url":"/dist/human.esm.js.map","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":4115,"url":"/index-pwa.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":79038,"url":"/models/blazeface.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":89289,"url":"/models/facemesh.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":122025,"url":"/models/iris.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":602812,"url":"/models/handtrack.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":82231,"url":"/models/handlandmark-full.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":161813,"url":"/models/movenet-lightning.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":17980,"url":"/models/emotion.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":71432,"url":"/models/faceres.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":538928,"url":"/models/blazeface.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2955780,"url":"/models/facemesh.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2599092,"url":"/models/iris.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":5431368,"url":"/models/handlandmark-full.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":820516,"url":"/models/emotion.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":4650216,"url":"/models/movenet-lightning.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:42 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":6978814,"url":"/models/faceres.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:09:43 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2964837,"url":"/models/handtrack.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:19 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/html","size":7842,"url":"/?backend=webgl&warmup=true&worker=false","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:19 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/css","size":107884,"url":"/icons.css","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:19 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":48222,"url":"/index.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:19 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":2936245,"url":"/dist/human.esm.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:19 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":14514,"url":"/helpers/menu.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:19 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":10901,"url":"/helpers/gl-bench.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:19 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":3406,"url":"/helpers/webrtc.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:19 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":5785,"url":"/helpers/jsonview.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:19 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"font/woff2","size":181500,"url":"/assets/lato-light.woff2","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:20 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":4770756,"url":"/dist/human.esm.js.map","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:20 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":4115,"url":"/index-pwa.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":79038,"url":"/models/blazeface.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":89289,"url":"/models/facemesh.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":122025,"url":"/models/iris.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":602812,"url":"/models/handtrack.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":82231,"url":"/models/handlandmark-full.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":161813,"url":"/models/movenet-lightning.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":17980,"url":"/models/emotion.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":71432,"url":"/models/faceres.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":538928,"url":"/models/blazeface.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2955780,"url":"/models/facemesh.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2599092,"url":"/models/iris.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":5431368,"url":"/models/handlandmark-full.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":820516,"url":"/models/emotion.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":6978814,"url":"/models/faceres.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":4650216,"url":"/models/movenet-lightning.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:10:21 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2964837,"url":"/models/handtrack.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:43 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/html","size":7842,"url":"/?backend=wasm&warmup=true&worker=false","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:44 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/css","size":107884,"url":"/icons.css","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:44 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":48222,"url":"/index.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:44 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":2936245,"url":"/dist/human.esm.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:44 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":14514,"url":"/helpers/menu.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:44 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":10901,"url":"/helpers/gl-bench.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:44 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":3406,"url":"/helpers/webrtc.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:44 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":5785,"url":"/helpers/jsonview.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:44 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"font/woff2","size":181500,"url":"/assets/lato-light.woff2","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:44 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":4770756,"url":"/dist/human.esm.js.map","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:44 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":4115,"url":"/index-pwa.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":79038,"url":"/models/blazeface.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":89289,"url":"/models/facemesh.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":122025,"url":"/models/iris.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":602812,"url":"/models/handtrack.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":82231,"url":"/models/handlandmark-full.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":161813,"url":"/models/movenet-lightning.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":17980,"url":"/models/emotion.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":71432,"url":"/models/faceres.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":538928,"url":"/models/blazeface.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2599092,"url":"/models/iris.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":5431368,"url":"/models/handlandmark-full.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2955780,"url":"/models/facemesh.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":820516,"url":"/models/emotion.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":6978814,"url":"/models/faceres.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":4650216,"url":"/models/movenet-lightning.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:13:46 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2964837,"url":"/models/handtrack.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:17:33 INFO:  Watch: {"event":"modify","input":"src/gear/emotion.ts"}
2021-11-02 11:17:33 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
2021-11-02 11:17:33 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":55,"inputBytes":516386,"outputBytes":432583}
2021-11-02 11:17:33 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
2021-11-02 11:17:33 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":55,"inputBytes":516394,"outputBytes":432587}
2021-11-02 11:17:33 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
2021-11-02 11:17:33 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":55,"inputBytes":516461,"outputBytes":432659}
2021-11-02 11:17:33 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
2021-11-02 11:17:33 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2323,"outputBytes":973}
2021-11-02 11:17:33 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":55,"inputBytes":516084,"outputBytes":434514}
2021-11-02 11:17:34 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2564340,"outputBytes":2499559}
2021-11-02 11:17:34 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":55,"inputBytes":3014670,"outputBytes":1607745}
2021-11-02 11:17:34 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":55,"inputBytes":3014670,"outputBytes":2936138}
2021-11-02 11:17:34 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":3662,"outputBytes":3471}
2021-11-02 11:19:55 INFO:  Watch: {"event":"modify","input":"src/gesture/gesture.ts"}
2021-11-02 11:19:55 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
2021-11-02 11:19:55 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":55,"inputBytes":516439,"outputBytes":432636}
2021-11-02 11:19:55 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
2021-11-02 11:19:55 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":55,"inputBytes":516447,"outputBytes":432640}
2021-11-02 11:19:55 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
2021-11-02 11:19:55 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":55,"inputBytes":516514,"outputBytes":432712}
2021-11-02 11:19:55 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
2021-11-02 11:19:55 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2323,"outputBytes":973}
2021-11-02 11:19:55 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":55,"inputBytes":516137,"outputBytes":434567}
2021-11-02 11:19:55 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2564340,"outputBytes":2499559}
2021-11-02 11:19:56 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":55,"inputBytes":3014723,"outputBytes":1607778}
2021-11-02 11:19:56 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":55,"inputBytes":3014723,"outputBytes":2936191}
2021-11-02 11:19:56 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":3662,"outputBytes":3471}
2021-11-02 11:20:46 INFO:  Watch: {"event":"modify","input":"src/gesture/gesture.ts"}
2021-11-02 11:20:46 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
2021-11-02 11:20:46 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":55,"inputBytes":516386,"outputBytes":432583}
2021-11-02 11:20:46 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
2021-11-02 11:20:46 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":55,"inputBytes":516394,"outputBytes":432587}
2021-11-02 11:20:46 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
2021-11-02 11:20:46 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":55,"inputBytes":516461,"outputBytes":432659}
2021-11-02 11:20:46 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
2021-11-02 11:20:46 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2323,"outputBytes":973}
2021-11-02 11:20:46 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":55,"inputBytes":516084,"outputBytes":434514}
2021-11-02 11:20:47 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2564340,"outputBytes":2499559}
2021-11-02 11:20:47 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":55,"inputBytes":3014670,"outputBytes":1607745}
2021-11-02 11:20:48 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":55,"inputBytes":3014670,"outputBytes":2936138}
2021-11-02 11:20:48 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":3662,"outputBytes":3471}
2021-11-02 11:25:53 INFO:  Watch: {"event":"modify","input":"src/gear/emotion.ts"}
2021-11-02 11:25:53 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
2021-11-02 11:25:53 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":55,"inputBytes":516416,"outputBytes":432613}
2021-11-02 11:25:53 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
2021-11-02 11:25:53 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":55,"inputBytes":516424,"outputBytes":432617}
2021-11-02 11:25:53 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
2021-11-02 11:25:53 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":55,"inputBytes":516491,"outputBytes":432689}
2021-11-02 11:25:53 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
2021-11-02 11:25:53 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2323,"outputBytes":973}
2021-11-02 11:25:53 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":55,"inputBytes":516114,"outputBytes":434544}
2021-11-02 11:25:53 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2564340,"outputBytes":2499559}
2021-11-02 11:25:54 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":55,"inputBytes":3014700,"outputBytes":1607762}
2021-11-02 11:25:54 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":55,"inputBytes":3014700,"outputBytes":2936168}
2021-11-02 11:25:54 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":3662,"outputBytes":3471}
2021-11-02 11:32:30 INFO:  Watch: {"event":"modify","input":"src/gear/emotion.ts"}
2021-11-02 11:32:30 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
2021-11-02 11:32:30 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":55,"inputBytes":516438,"outputBytes":432635}
2021-11-02 11:32:30 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
2021-11-02 11:32:30 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":55,"inputBytes":516446,"outputBytes":432639}
2021-11-02 11:32:30 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
2021-11-02 11:32:30 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":55,"inputBytes":516513,"outputBytes":432711}
2021-11-02 11:32:30 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
2021-11-02 11:32:30 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2323,"outputBytes":973}
2021-11-02 11:32:30 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":55,"inputBytes":516136,"outputBytes":434566}
2021-11-02 11:32:30 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2564340,"outputBytes":2499559}
2021-11-02 11:32:31 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":55,"inputBytes":3014722,"outputBytes":1607775}
2021-11-02 11:32:31 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":55,"inputBytes":3014722,"outputBytes":2936190}
2021-11-02 11:32:31 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":3662,"outputBytes":3471}
2021-11-02 11:33:48 INFO:  Watch: {"event":"modify","input":"src/face/face.ts"}
2021-11-02 11:33:48 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
2021-11-02 11:33:48 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":55,"inputBytes":516465,"outputBytes":432662}
2021-11-02 11:33:48 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
2021-11-02 11:33:48 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":55,"inputBytes":516473,"outputBytes":432666}
2021-11-02 11:33:48 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
2021-11-02 11:33:48 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":55,"inputBytes":516540,"outputBytes":432738}
2021-11-02 11:33:48 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
2021-11-02 11:33:48 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2323,"outputBytes":973}
2021-11-02 11:33:48 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":55,"inputBytes":516163,"outputBytes":434593}
2021-11-02 11:33:48 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2564340,"outputBytes":2499559}
2021-11-02 11:33:49 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":55,"inputBytes":3014749,"outputBytes":1607793}
2021-11-02 11:33:49 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":55,"inputBytes":3014749,"outputBytes":2936217}
2021-11-02 11:33:49 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":3662,"outputBytes":3471}
2021-11-02 11:36:07 INFO:  Watch: {"event":"modify","input":"src/gear/emotion.ts"}
2021-11-02 11:36:07 INFO:  Watch: {"event":"modify","input":"src/face/face.ts","skip":true}
2021-11-02 11:36:07 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
2021-11-02 11:36:07 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":55,"inputBytes":516386,"outputBytes":432583}
2021-11-02 11:36:07 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
2021-11-02 11:36:07 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":55,"inputBytes":516394,"outputBytes":432587}
2021-11-02 11:36:07 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
2021-11-02 11:36:07 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":55,"inputBytes":516461,"outputBytes":432659}
2021-11-02 11:36:07 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
2021-11-02 11:36:07 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2323,"outputBytes":973}
2021-11-02 11:36:07 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":55,"inputBytes":516084,"outputBytes":434514}
2021-11-02 11:36:07 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2564340,"outputBytes":2499559}
2021-11-02 11:36:08 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":55,"inputBytes":3014670,"outputBytes":1607745}
2021-11-02 11:36:08 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":55,"inputBytes":3014670,"outputBytes":2936138}
2021-11-02 11:36:08 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":3662,"outputBytes":3471}
2021-11-02 11:39:39 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/html","size":7842,"url":"/?backend=wasm&warmup=true&worker=false","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:39 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/css","size":107884,"url":"/icons.css","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:39 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":48222,"url":"/index.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:39 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":2936138,"url":"/dist/human.esm.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:39 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":14514,"url":"/helpers/menu.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:39 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":10901,"url":"/helpers/gl-bench.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:39 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":3406,"url":"/helpers/webrtc.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:39 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":5785,"url":"/helpers/jsonview.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:39 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"font/woff2","size":181500,"url":"/assets/lato-light.woff2","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:39 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":4770589,"url":"/dist/human.esm.js.map","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:39 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"image/x-icon","size":261950,"url":"/favicon.ico","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:39 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":4115,"url":"/index-pwa.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":79038,"url":"/models/blazeface.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":89289,"url":"/models/facemesh.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":122025,"url":"/models/iris.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":602812,"url":"/models/handtrack.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":82231,"url":"/models/handlandmark-full.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":161813,"url":"/models/movenet-lightning.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":17980,"url":"/models/emotion.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":71432,"url":"/models/faceres.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":538928,"url":"/models/blazeface.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2599092,"url":"/models/iris.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":5431368,"url":"/models/handlandmark-full.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":820516,"url":"/models/emotion.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2955780,"url":"/models/facemesh.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":6978814,"url":"/models/faceres.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":4650216,"url":"/models/movenet-lightning.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:39:41 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2964837,"url":"/models/handtrack.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:12 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/html","size":7842,"url":"/","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:13 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/css","size":107884,"url":"/icons.css","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:13 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":48222,"url":"/index.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:13 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":2936138,"url":"/dist/human.esm.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:13 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":14514,"url":"/helpers/menu.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:13 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":10901,"url":"/helpers/gl-bench.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:13 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":3406,"url":"/helpers/webrtc.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:13 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":5785,"url":"/helpers/jsonview.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:13 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"font/woff2","size":181500,"url":"/assets/lato-light.woff2","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:13 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":4770589,"url":"/dist/human.esm.js.map","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:13 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":4115,"url":"/index-pwa.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:16 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":1778,"url":"/index-worker.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:16 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"text/javascript","size":1607745,"url":"/dist/human.js","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":79038,"url":"/models/blazeface.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":89289,"url":"/models/facemesh.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":122025,"url":"/models/iris.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":602812,"url":"/models/handtrack.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":82231,"url":"/models/handlandmark-full.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":161813,"url":"/models/movenet-lightning.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":17980,"url":"/models/emotion.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/json","size":71432,"url":"/models/faceres.json","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":538928,"url":"/models/blazeface.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2955780,"url":"/models/facemesh.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2599092,"url":"/models/iris.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":5431368,"url":"/models/handlandmark-full.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":4650216,"url":"/models/movenet-lightning.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":820516,"url":"/models/emotion.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":6978814,"url":"/models/faceres.bin","remote":"::ffff:192.168.0.200"}
2021-11-02 11:40:17 DATA:  HTTP: {"method":"GET","ver":"1.1","status":200,"mime":"application/octet-stream","size":2964837,"url":"/models/handtrack.bin","remote":"::ffff:192.168.0.200"}
2021-11-03 16:30:50 INFO:  @vladmandic/human version 2.5.0
2021-11-03 16:30:50 INFO:  User: vlado Platform: linux Arch: x64 Node: v17.0.1
2021-11-03 16:30:50 INFO:  Application: {"name":"@vladmandic/human","version":"2.5.0"}
2021-11-03 16:30:50 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
2021-11-03 16:30:50 INFO:  Toolchain: {"build":"0.6.3","esbuild":"0.13.12","typescript":"4.4.4","typedoc":"0.22.7","eslint":"8.1.0"}
2021-11-03 16:30:50 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
2021-11-03 16:30:50 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
2021-11-03 16:30:50 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
2021-11-03 16:30:51 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":55,"inputBytes":516475,"outputBytes":431829}
2021-11-03 16:30:51 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
2021-11-03 16:30:51 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":55,"inputBytes":516483,"outputBytes":431833}
2021-11-03 16:30:51 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
2021-11-03 16:30:51 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":55,"inputBytes":516550,"outputBytes":431905}
2021-11-03 16:30:51 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
2021-11-03 16:30:51 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2323,"outputBytes":973}
2021-11-03 16:30:51 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":55,"inputBytes":516173,"outputBytes":433664}
2021-11-03 16:30:51 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2564340,"outputBytes":2499559}
2021-11-03 16:30:51 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":55,"inputBytes":3014759,"outputBytes":1607128}
2021-11-03 16:30:52 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":55,"inputBytes":3014759,"outputBytes":2935423}
2021-11-03 16:31:13 STATE: Typings: {"input":"src/human.ts","output":"types","files":48}
2021-11-03 16:31:19 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":48,"generated":true}
2021-11-03 16:31:19 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":3807,"outputBytes":3340}
2021-11-03 16:31:50 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":92,"errors":0,"warnings":0}
2021-11-03 16:31:51 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
2021-11-03 16:31:51 INFO:  Done...

View File

@ -188,12 +188,19 @@ async function test(Human, inputConfig) {
else log('state', 'passed: warmup none result match');
config.warmup = 'face';
res = await testWarmup(human, 'default');
if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 1 || res?.gesture?.length !== 8) log('error', 'failed: warmup face result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 1 || res?.gesture?.length !== 7) log('error', 'failed: warmup face result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
else log('state', 'passed: warmup face result match');
config.warmup = 'body';
res = await testWarmup(human, 'default');
if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 1 || res?.gesture?.length !== 7) log('error', 'failed: warmup body result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 1 || res?.gesture?.length !== 6) log('error', 'failed: warmup body result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
else log('state', 'passed: warmup body result match');
log('state', 'details:', {
face: { boxScore: res.face[0].boxScore, faceScore: res.face[0].faceScore, age: res.face[0].age, gender: res.face[0].gender, genderScore: res.face[0].genderScore },
emotion: res.face[0].emotion,
body: { score: res.body[0].score, keypoints: res.body[0].keypoints.length },
hand: { boxScore: res.hand[0].boxScore, fingerScore: res.hand[0].fingerScore, keypoints: res.hand[0].keypoints.length },
gestures: res.gesture,
});
// test default config async
log('info', 'test default');
@ -201,8 +208,8 @@ async function test(Human, inputConfig) {
config.async = true;
config.cacheSensitivity = 0;
res = await testDetect(human, 'samples/in/ai-body.jpg', 'default');
if (!res || res?.face?.length !== 1 || res?.face[0].gender !== 'female') log('error', 'failed: default result face mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
else log('state', 'passed: default result face match');
if (!res || res?.face?.length !== 1 || res?.face[0].gender !== 'female') log('error', 'failed: default result face mismatch', res?.face?.length, res?.face[0].gender, res?.face[0].genderScore);
else log('state', 'passed: default result face match', res?.face?.length, res?.face[0].gender, res?.face[0].genderScore);
// test default config sync
log('info', 'test sync');
@ -210,8 +217,8 @@ async function test(Human, inputConfig) {
config.async = false;
config.cacheSensitivity = 0;
res = await testDetect(human, 'samples/in/ai-body.jpg', 'default');
if (!res || res?.face?.length !== 1 || res?.face[0].gender !== 'female') log('error', 'failed: default sync', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
else log('state', 'passed: default sync');
if (!res || res?.face?.length !== 1 || res?.face[0].gender !== 'female') log('error', 'failed: default sync', res?.face?.length, res?.face[0].gender, res?.face[0].genderScore);
else log('state', 'passed: default sync', res?.face?.length, res?.face[0].gender, res?.face[0].genderScore);
// test image processing
const img1 = await human.image(null);
@ -240,7 +247,7 @@ async function test(Human, inputConfig) {
res1 = human.similarity(desc1, desc1);
res2 = human.similarity(desc1, desc2);
res3 = human.similarity(desc1, desc3);
if (res1 < 1 || res2 < 0.55 || res3 < 0.5) log('error', 'failed: face similarity', { similarity: [res1, res2, res3], descriptors: [desc1?.length, desc2?.length, desc3?.length] });
if (res1 < 1 || res2 < 0.50 || res3 < 0.50) log('error', 'failed: face similarity', { similarity: [res1, res2, res3], descriptors: [desc1?.length, desc2?.length, desc3?.length] });
else log('state', 'passed: face similarity', { similarity: [res1, res2, res3], descriptors: [desc1?.length, desc2?.length, desc3?.length] });
// test face matching
@ -271,7 +278,7 @@ async function test(Human, inputConfig) {
config.body = { minConfidence: 0.0001 };
config.hand = { minConfidence: 0.0001 };
res = await testDetect(human, 'samples/in/ai-body.jpg', 'default');
if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 2 || res?.gesture?.length !== 9) log('error', 'failed: sensitive result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 2 || res?.gesture?.length !== 8) log('error', 'failed: sensitive result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
else log('state', 'passed: sensitive result match');
// test sensitive details face
@ -280,7 +287,7 @@ async function test(Human, inputConfig) {
log('error', 'failed: sensitive face result mismatch', res?.face?.length, face?.box?.length, face?.mesh?.length, face?.embedding?.length, face?.rotation?.matrix?.length);
} else log('state', 'passed: sensitive face result match');
if (!face || face?.emotion?.length < 3) log('error', 'failed: sensitive face emotion result mismatch', face?.emotion.length);
else log('state', 'passed: sensitive face emotion result mismatch', face?.emotion.length);
else log('state', 'passed: sensitive face emotion result', face?.emotion.length);
// test sensitive details body
const body = res && res.body ? res.body[0] : null;

File diff suppressed because it is too large Load Diff

View File

@ -15,29 +15,33 @@ export declare const getRawBox: (box: any, input: any) => Box;
export declare const scaleBoxCoordinates: (box: any, factor: any) => {
startPoint: number[];
endPoint: number[];
landmarks: any;
confidence: any;
};
export declare const cutBoxFromImageAndResize: (box: any, image: any, cropSize: any) => any;
export declare const enlargeBox: (box: any, factor?: number) => {
export declare const enlargeBox: (box: any, factor: any) => {
startPoint: Point;
endPoint: Point;
landmarks: any;
confidence: any;
};
export declare const squarifyBox: (box: any) => {
startPoint: Point;
endPoint: Point;
landmarks: any;
confidence: any;
};
export declare const calculateLandmarksBoundingBox: (landmarks: any) => {
startPoint: number[];
endPoint: number[];
startPoint: Point;
endPoint: Point;
landmarks: any;
};
export declare const IDENTITY_MATRIX: number[][];
export declare const fixedRotationMatrix: number[][];
export declare const normalizeRadians: (angle: any) => number;
export declare const computeRotation: (point1: any, point2: any) => number;
export declare const radToDegrees: (rad: any) => number;
export declare const buildTranslationMatrix: (x: any, y: any) => any[][];
export declare const dot: (v1: any, v2: any) => number;
export declare const dot: (v1: number[], v2: number[]) => number;
export declare const getColumnFrom2DArr: (arr: any, columnIndex: any) => number[];
export declare const multiplyTransformMatrices: (mat1: any, mat2: any) => number[][];
export declare const buildRotationMatrix: (rotation: any, center: any) => number[][];
@ -45,5 +49,5 @@ export declare const invertTransformMatrix: (matrix: any) => any[][];
export declare const rotatePoint: (homogeneousCoordinate: any, rotationMatrix: any) => number[];
export declare const xyDistanceBetweenPoints: (a: any, b: any) => number;
export declare function generateAnchors(inputSize: any): [number, number][];
export declare function transformRawCoords(rawCoords: any, box: any, angle: any, rotationMatrix: any, inputSize: any): any;
export declare function transformRawCoords(coordsRaw: any, box: any, angle: any, rotationMatrix: any, inputSize: any): any;
export declare function correctFaceRotation(box: any, input: any, inputSize: any): any[];

View File

@ -8,6 +8,7 @@ export declare const getEyeBox: (rawCoords: any, face: any, eyeInnerCornerIndex:
startPoint: Point;
endPoint: Point;
landmarks: any;
confidence: any;
};
boxSize: [number, number];
crop: any;