implement face attention model

pull/356/head
Vladimir Mandic 2022-04-11 11:45:24 -04:00
parent fd0d6558f5
commit 4ab0a9d18f
24 changed files with 694 additions and 64 deletions

View File

@ -1,6 +1,6 @@
# @vladmandic/human
Version: **2.6.5**
Version: **2.7.0**
Description: **Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition**
Author: **Vladimir Mandic <mandic00@live.com>**
@ -9,8 +9,9 @@
## Changelog
### **HEAD -> main** 2022/04/05 mandic00@live.com
### **HEAD -> main** 2022/04/10 mandic00@live.com
- rebuild
### **2.6.5** 2022/04/01 mandic00@live.com

View File

@ -25,6 +25,6 @@
<video id="video" playsinline style="display: none"></video>
<pre id="status" style="position: absolute; top: 12px; right: 20px; background-color: grey; padding: 8px; box-shadow: 2px 2px black"></pre>
<pre id="log" style="padding: 8px"></pre>
<div id="performance" style="position: absolute; bottom: 0; width: 100%; padding: 8px; font-size: 0.8rem;"></div>
<div id="performance" style="position: absolute; bottom: 1rem; width: 100%; padding: 8px; font-size: 0.8rem;"></div>
</body>
</html>

View File

@ -4,8 +4,8 @@
author: <https://github.com/vladmandic>'
*/
import{Human as p}from"../../dist/human.esm.js";var w={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!0},gesture:{enabled:!0}},t=new p(w);t.env.perfadd=!1;t.draw.options.font='small-caps 18px "Lato"';t.draw.options.lineHeight=20;var e={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},i={detect:0,draw:0,tensors:0},d={detect:0,draw:0},s=(...a)=>{e.log.innerText+=a.join(" ")+`
`,console.log(...a)},r=a=>e.fps.innerText=a,b=a=>e.perf.innerText="tensors:"+t.tf.memory().numTensors+" | performance: "+JSON.stringify(a).replace(/"|{|}/g,"").replace(/,/g," | ");async function h(){r("starting webcam...");let a={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth}}},n=await navigator.mediaDevices.getUserMedia(a),m=new Promise(f=>{e.video.onloadeddata=()=>f(!0)});e.video.srcObject=n,e.video.play(),await m,e.canvas.width=e.video.videoWidth,e.canvas.height=e.video.videoHeight;let o=n.getVideoTracks()[0],u=o.getCapabilities?o.getCapabilities():"",v=o.getSettings?o.getSettings():"",g=o.getConstraints?o.getConstraints():"";s("video:",e.video.videoWidth,e.video.videoHeight,o.label,{stream:n,track:o,settings:v,constraints:g,capabilities:u}),e.canvas.onclick=()=>{e.video.paused?e.video.play():e.video.pause()}}async function c(){if(!e.video.paused){await t.detect(e.video);let n=t.tf.memory().numTensors;n-i.tensors!==0&&s("allocated tensors:",n-i.tensors),i.tensors=n}let a=t.now();d.detect=1e3/(a-i.detect),i.detect=a,requestAnimationFrame(c)}async function l(){if(!e.video.paused){let n=await t.next(t.result);await t.draw.canvas(e.video,e.canvas),await t.draw.all(e.canvas,n),b(n.performance)}let a=t.now();d.draw=1e3/(a-i.draw),i.draw=a,r(e.video.paused?"paused":`fps: ${d.detect.toFixed(1).padStart(5," ")} detect | ${d.draw.toFixed(1).padStart(5," ")} draw`),setTimeout(l,30)}async function y(){s("human version:",t.version,"| tfjs version:",t.tf.version["tfjs-core"]),s("platform:",t.env.platform,"| agent:",t.env.agent),r("loading..."),await t.load(),s("backend:",t.tf.getBackend(),"| available:",t.env.backends),s("loaded models:",Object.values(t.models).filter(a=>a!==null).length),r("initializing..."),await t.warmup(),await h(),await c(),await l()}window.onload=y;
import{Human as p}from"../../dist/human.esm.js";var w={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1},cacheSensitivity:0,face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!0},gesture:{enabled:!0}},t=new p(w);t.env.perfadd=!1;t.draw.options.font='small-caps 18px "Lato"';t.draw.options.lineHeight=20;var e={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},i={detect:0,draw:0,tensors:0},d={detect:0,draw:0},s=(...a)=>{e.log.innerText+=a.join(" ")+`
`,console.log(...a)},r=a=>e.fps.innerText=a,b=a=>e.perf.innerText="tensors:"+t.tf.memory().numTensors+" | performance: "+JSON.stringify(a).replace(/"|{|}/g,"").replace(/,/g," | ");async function h(){r("starting webcam...");let a={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth}}},n=await navigator.mediaDevices.getUserMedia(a),m=new Promise(f=>{e.video.onloadeddata=()=>f(!0)});e.video.srcObject=n,e.video.play(),await m,e.canvas.width=e.video.videoWidth,e.canvas.height=e.video.videoHeight;let o=n.getVideoTracks()[0],v=o.getCapabilities?o.getCapabilities():"",u=o.getSettings?o.getSettings():"",g=o.getConstraints?o.getConstraints():"";s("video:",e.video.videoWidth,e.video.videoHeight,o.label,{stream:n,track:o,settings:u,constraints:g,capabilities:v}),e.canvas.onclick=()=>{e.video.paused?e.video.play():e.video.pause()}}async function c(){if(!e.video.paused){await t.detect(e.video);let n=t.tf.memory().numTensors;n-i.tensors!==0&&s("allocated tensors:",n-i.tensors),i.tensors=n}let a=t.now();d.detect=1e3/(a-i.detect),i.detect=a,requestAnimationFrame(c)}async function l(){if(!e.video.paused){let n=await t.next(t.result);await t.draw.canvas(e.video,e.canvas),await t.draw.all(e.canvas,n),b(n.performance)}let a=t.now();d.draw=1e3/(a-i.draw),i.draw=a,r(e.video.paused?"paused":`fps: ${d.detect.toFixed(1).padStart(5," ")} detect | ${d.draw.toFixed(1).padStart(5," ")} draw`),setTimeout(l,30)}async function y(){s("human version:",t.version,"| tfjs version:",t.tf.version["tfjs-core"]),s("platform:",t.env.platform,"| agent:",t.env.agent),r("loading..."),await t.load(),s("backend:",t.tf.getBackend(),"| available:",t.env.backends),s("loaded models:",Object.values(t.models).filter(a=>a!==null).length),r("initializing..."),await t.warmup(),await h(),await c(),await l()}window.onload=y;
/**
* Human demo for browsers
* @default Human Library

File diff suppressed because one or more lines are too long

View File

@ -14,7 +14,8 @@ const humanConfig: Partial<Config> = { // user configuration for human, used to
// async: true,
modelBasePath: '../../models',
filter: { enabled: true, equalization: false },
face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },
cacheSensitivity: 0,
face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },
body: { enabled: true },
hand: { enabled: true },
object: { enabled: true },

View File

@ -1,6 +1,6 @@
{
"name": "@vladmandic/human",
"version": "2.6.5",
"version": "2.7.0",
"description": "Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition",
"sideEffects": false,
"main": "dist/human.node.js",
@ -86,7 +86,7 @@
"rimraf": "^3.0.2",
"seedrandom": "^3.0.5",
"tslib": "^2.3.1",
"typedoc": "0.22.14",
"typedoc": "0.22.15",
"typescript": "4.6.3"
}
}

View File

@ -40,6 +40,9 @@ export interface FaceMeshConfig extends GenericConfig {}
/** Iris part of face configuration */
export interface FaceIrisConfig extends GenericConfig {}
/** Attention part of face configuration */
export interface FaceAttentionConfig extends GenericConfig {}
/** Description or face embedding part of face configuration
* - also used by age and gender detection
*/
@ -64,6 +67,7 @@ export interface FaceLivenessConfig extends GenericConfig {}
export interface FaceConfig extends GenericConfig {
detector: Partial<FaceDetectorConfig>,
mesh: Partial<FaceMeshConfig>,
attention: Partial<FaceAttentionConfig>,
iris: Partial<FaceIrisConfig>,
description: Partial<FaceDescriptionConfig>,
emotion: Partial<FaceEmotionConfig>,
@ -349,6 +353,10 @@ const config: Config = {
enabled: true,
modelPath: 'facemesh.json',
},
attention: {
enabled: false,
modelPath: 'facemesh-attention.json',
},
iris: {
enabled: true,
modelPath: 'iris.json',

51
src/draw/body.ts Normal file
View File

@ -0,0 +1,51 @@
import { mergeDeep } from '../util/util';
import { getCanvasContext, rect, point, curves, colorDepth } from './primitives';
import { options } from './options';
import type { BodyResult } from '../result';
import type { AnyCanvas, DrawOptions } from '../exports';
/** Draw detected bodies: bounding box, keypoints, per-keypoint labels and skeleton polygons.
 * @param inCanvas - target canvas to draw on
 * @param result - array of body detection results
 * @param drawOptions - optional per-call overrides merged over global draw options
 */
export async function body(inCanvas: AnyCanvas, result: Array<BodyResult>, drawOptions?: Partial<DrawOptions>) {
  const localOptions = mergeDeep(options, drawOptions);
  if (!result || !inCanvas) return;
  const ctx = getCanvasContext(inCanvas);
  if (!ctx) return;
  ctx.lineJoin = 'round';
  for (let i = 0; i < result.length; i++) {
    const b = result[i];
    ctx.strokeStyle = localOptions.color;
    ctx.fillStyle = localOptions.color;
    ctx.lineWidth = localOptions.lineWidth;
    ctx.font = localOptions.font;
    if (localOptions.drawBoxes && b.box && b.box.length === 4) {
      rect(ctx, b.box[0], b.box[1], b.box[2], b.box[3], localOptions);
      if (localOptions.drawLabels) {
        // fix: truncate score to a whole percent for consistency with face/hand/object labels
        // (previously printed the raw float, e.g. "body 93.99999999%")
        const label = `body ${Math.trunc(100 * b.score)}%`;
        if (localOptions.shadowColor && localOptions.shadowColor !== '') {
          ctx.fillStyle = localOptions.shadowColor;
          ctx.fillText(label, b.box[0] + 3, 1 + b.box[1] + localOptions.lineHeight, b.box[2]);
        }
        ctx.fillStyle = localOptions.labelColor;
        ctx.fillText(label, b.box[0] + 2, 0 + b.box[1] + localOptions.lineHeight, b.box[2]);
      }
    }
    if (localOptions.drawPoints && b.keypoints) {
      for (const kpt of b.keypoints) {
        if (!kpt.score) continue; // skip undetected keypoints (score 0 or undefined)
        ctx.fillStyle = localOptions.useDepth && kpt.position[2] ? colorDepth(kpt.position[2] || 0) : localOptions.color;
        point(ctx, kpt.position[0], kpt.position[1], 0, localOptions);
      }
    }
    if (localOptions.drawLabels && b.keypoints) {
      ctx.font = localOptions.font;
      for (const kpt of b.keypoints) {
        if (!kpt.score) continue; // skip undetected keypoints
        ctx.fillStyle = localOptions.useDepth && kpt.position[2] ? colorDepth(kpt.position[2]) : localOptions.color;
        ctx.fillText(`${kpt.part} ${Math.trunc(100 * kpt.score)}%`, kpt.position[0] + 4, kpt.position[1] + 4);
      }
    }
    if (localOptions.drawPolygons && b.keypoints && b.annotations) {
      // annotations group keypoints into connected limb segments; draw each as a curve/line
      for (const part of Object.values(b.annotations)) {
        for (const connected of part) curves(ctx, connected, localOptions);
      }
    }
  }
}

78
src/draw/draw.ts Normal file
View File

@ -0,0 +1,78 @@
/**
* Module that implements helper draw functions, exposed as human.draw
*/
import { mergeDeep, now } from '../util/util';
import { env } from '../util/env';
import { getCanvasContext, rect } from './primitives';
import { options } from './options';
import { face } from './face';
import { body } from './body';
import { hand } from './hand';
import { object } from './object';
import { gesture } from './gesture';
import type { Result, PersonResult } from '../result';
import type { AnyCanvas, DrawOptions } from '../exports';
let drawTime = 0;
export { options } from './options';
export { face } from './face';
export { body } from './body';
export { hand } from './hand';
export { object } from './object';
export { gesture } from './gesture';
/** Draw combined person results (merged face/body/hand boxes) instead of individual detection result objects. */
export async function person(inCanvas: AnyCanvas, result: Array<PersonResult>, drawOptions?: Partial<DrawOptions>) {
  const localOptions = mergeDeep(options, drawOptions);
  if (!result || !inCanvas) return;
  const ctx = getCanvasContext(inCanvas);
  if (!ctx) return;
  ctx.lineJoin = 'round';
  ctx.font = localOptions.font;
  if (!localOptions.drawBoxes) return; // person drawing only renders boxes and labels
  for (const [i, p] of result.entries()) {
    const [x, y, w, h] = p.box;
    ctx.strokeStyle = localOptions.color;
    ctx.fillStyle = localOptions.color;
    rect(ctx, x, y, w, h, localOptions);
    if (localOptions.drawLabels) {
      const label = `person #${i}`;
      if (localOptions.shadowColor && localOptions.shadowColor !== '') {
        // draw label shadow first, offset by one pixel
        ctx.fillStyle = localOptions.shadowColor;
        ctx.fillText(label, x + 3, 1 + y + localOptions.lineHeight, w);
      }
      ctx.fillStyle = localOptions.labelColor;
      ctx.fillText(label, x + 2, 0 + y + localOptions.lineHeight, w);
    }
    ctx.stroke();
  }
}
/** Copy the processed input frame onto the output canvas. */
export async function canvas(input: AnyCanvas | HTMLImageElement | HTMLVideoElement, output: AnyCanvas) {
  if (!input || !output) return;
  const ctx = getCanvasContext(output);
  ctx?.drawImage(input, 0, 0); // no-op when context cannot be acquired
}
/** Meta-function that performs draw for: face, body, hand, object and gesture results.
 * Draw operations run in parallel; returns the combined promise, or null on invalid input.
 */
export async function all(inCanvas: AnyCanvas, result: Result, drawOptions?: Partial<DrawOptions>) {
  if (!result || !result.performance || !inCanvas) return null; // fix: removed duplicated `!result` check
  const timeStamp = now();
  const localOptions = mergeDeep(options, drawOptions);
  const promise = Promise.all([
    face(inCanvas, result.face, localOptions),
    body(inCanvas, result.body, localOptions),
    hand(inCanvas, result.hand, localOptions),
    object(inCanvas, result.object, localOptions),
    gesture(inCanvas, result.gesture, localOptions), // gestures do not have buffering
    // person(inCanvas, result.persons, localOptions); // already included above
  ]);
  // draw time is cumulative when env.perfadd is set, per-frame otherwise
  drawTime = env.perfadd ? drawTime + Math.round(now() - timeStamp) : Math.round(now() - timeStamp);
  result.performance.draw = drawTime;
  return promise;
}

134
src/draw/face.ts Normal file
View File

@ -0,0 +1,134 @@
import { TRI468 as triangulation } from '../face/facemeshcoords';
import { mergeDeep } from '../util/util';
import { getCanvasContext, rad2deg, rect, point, lines, arrow } from './primitives';
import { options } from './options';
import type { FaceResult } from '../result';
import type { AnyCanvas, DrawOptions } from '../exports';
/** Draw detected faces: box, labels, mesh points, attention points, triangulated mesh, irises and gaze.
 * @param inCanvas - target canvas to draw on
 * @param result - array of face detection results
 * @param drawOptions - optional per-call overrides merged over global draw options
 */
export async function face(inCanvas: AnyCanvas, result: Array<FaceResult>, drawOptions?: Partial<DrawOptions>) {
  const localOptions = mergeDeep(options, drawOptions);
  if (!result || !inCanvas) return;
  const ctx = getCanvasContext(inCanvas);
  if (!ctx) return;
  for (const f of result) {
    ctx.font = localOptions.font;
    ctx.strokeStyle = localOptions.color;
    ctx.fillStyle = localOptions.color;
    if (localOptions.drawBoxes) rect(ctx, f.box[0], f.box[1], f.box[2], f.box[3], localOptions);
    if (localOptions.drawLabels) {
      // silly hack since fillText does not support new line
      const labels: string[] = [];
      labels.push(`face: ${Math.trunc(100 * f.score)}%`);
      if (f.genderScore) labels.push(`${f.gender || ''} ${Math.trunc(100 * f.genderScore)}%`);
      if (f.age) labels.push(`age: ${f.age || ''}`);
      if (f.iris) labels.push(`distance: ${f.iris}`);
      if (f.real) labels.push(`real: ${Math.trunc(100 * f.real)}%`);
      if (f.live) labels.push(`live: ${Math.trunc(100 * f.live)}%`);
      if (f.emotion && f.emotion.length > 0) {
        const emotion = f.emotion.map((a) => `${Math.trunc(100 * a.score)}% ${a.emotion}`);
        if (emotion.length > 3) emotion.length = 3; // show at most top-3 emotions
        labels.push(emotion.join(' '));
      }
      if (f.rotation && f.rotation.angle && f.rotation.gaze) {
        if (f.rotation.angle.roll) labels.push(`roll: ${rad2deg(f.rotation.angle.roll)}° yaw:${rad2deg(f.rotation.angle.yaw)}° pitch:${rad2deg(f.rotation.angle.pitch)}°`);
        if (f.rotation.gaze.bearing) labels.push(`gaze: ${rad2deg(f.rotation.gaze.bearing)}°`);
      }
      if (labels.length === 0) labels.push('face');
      ctx.fillStyle = localOptions.color;
      for (let i = labels.length - 1; i >= 0; i--) {
        const x = Math.max(f.box[0], 0);
        const y = i * localOptions.lineHeight + f.box[1];
        if (localOptions.shadowColor && localOptions.shadowColor !== '') {
          ctx.fillStyle = localOptions.shadowColor;
          ctx.fillText(labels[i], x + 5, y + 16);
        }
        ctx.fillStyle = localOptions.labelColor;
        ctx.fillText(labels[i], x + 4, y + 15);
      }
    }
    // ctx.lineWidth = localOptions.lineWidth;
    ctx.lineWidth = 2; // fixed width: configured lineWidth is too thick for the dense mesh
    if (f.mesh && f.mesh.length > 0) {
      if (localOptions.drawPoints) {
        // fix: was Math.max(468, f.mesh.length), which reads past the end of short meshes
        // and double-draws attention points (468+); draw at most the 468 base mesh points here,
        // attention points are drawn separately below
        const length = Math.min(468, f.mesh.length);
        for (let i = 0; i < length; i++) point(ctx, f.mesh[i][0], f.mesh[i][1], f.mesh[i][2], localOptions);
      }
      if (localOptions.drawAttention && f.mesh.length > 468) {
        // attention keypoints are tagged with z of -255 so point() tints them differently
        for (let i = 468; i < f.mesh.length; i++) point(ctx, f.mesh[i][0], f.mesh[i][1], -255, localOptions);
      }
      if (localOptions.drawPolygons) {
        if (f.mesh.length > 450) {
          for (let i = 0; i < triangulation.length / 3; i++) {
            const points = [
              triangulation[i * 3 + 0],
              triangulation[i * 3 + 1],
              triangulation[i * 3 + 2],
            ].map((index) => f.mesh[index]);
            lines(ctx, points, localOptions);
          }
        }
        // iris: array[center, left, top, right, bottom]
        if (f.annotations && f.annotations['leftEyeIris'] && f.annotations['leftEyeIris'][0]) {
          ctx.strokeStyle = localOptions.useDepth ? 'rgba(255, 200, 255, 0.3)' : localOptions.color;
          ctx.beginPath();
          const sizeX = Math.abs(f.annotations['leftEyeIris'][3][0] - f.annotations['leftEyeIris'][1][0]) / 2;
          const sizeY = Math.abs(f.annotations['leftEyeIris'][4][1] - f.annotations['leftEyeIris'][2][1]) / 2;
          ctx.ellipse(f.annotations['leftEyeIris'][0][0], f.annotations['leftEyeIris'][0][1], sizeX, sizeY, 0, 0, 2 * Math.PI);
          ctx.stroke();
          if (localOptions.fillPolygons) {
            ctx.fillStyle = localOptions.useDepth ? 'rgba(255, 255, 200, 0.3)' : localOptions.color;
            ctx.fill();
          }
        }
        if (f.annotations && f.annotations['rightEyeIris'] && f.annotations['rightEyeIris'][0]) {
          ctx.strokeStyle = localOptions.useDepth ? 'rgba(255, 200, 255, 0.3)' : localOptions.color;
          ctx.beginPath();
          const sizeX = Math.abs(f.annotations['rightEyeIris'][3][0] - f.annotations['rightEyeIris'][1][0]) / 2;
          const sizeY = Math.abs(f.annotations['rightEyeIris'][4][1] - f.annotations['rightEyeIris'][2][1]) / 2;
          ctx.ellipse(f.annotations['rightEyeIris'][0][0], f.annotations['rightEyeIris'][0][1], sizeX, sizeY, 0, 0, 2 * Math.PI);
          ctx.stroke();
          if (localOptions.fillPolygons) {
            ctx.fillStyle = localOptions.useDepth ? 'rgba(255, 255, 200, 0.3)' : localOptions.color;
            ctx.fill();
          }
        }
        // head-pose gaze: bezier curves bent by yaw/pitch (requires Path2D support)
        if (localOptions.drawGaze && f.rotation?.angle && typeof Path2D !== 'undefined') {
          ctx.strokeStyle = 'pink';
          const valX = (f.box[0] + f.box[2] / 2) - (f.box[3] * rad2deg(f.rotation.angle.yaw) / 90);
          const valY = (f.box[1] + f.box[3] / 2) + (f.box[2] * rad2deg(f.rotation.angle.pitch) / 90);
          const pathV = new Path2D(`
            M ${f.box[0] + f.box[2] / 2} ${f.box[1]}
            C
              ${valX} ${f.box[1]},
              ${valX} ${f.box[1] + f.box[3]},
              ${f.box[0] + f.box[2] / 2} ${f.box[1] + f.box[3]}
          `);
          const pathH = new Path2D(`
            M ${f.box[0]} ${f.box[1] + f.box[3] / 2}
            C
              ${f.box[0]} ${valY},
              ${f.box[0] + f.box[2]} ${valY},
              ${f.box[0] + f.box[2]} ${f.box[1] + f.box[3] / 2}
          `);
          ctx.stroke(pathH);
          ctx.stroke(pathV);
        }
        // eye gaze: arrow from each iris center along the gaze bearing, scaled by gaze strength
        if (localOptions.drawGaze && f.rotation?.gaze?.strength && f.rotation?.gaze?.bearing && f.annotations['leftEyeIris'] && f.annotations['rightEyeIris'] && f.annotations['leftEyeIris'][0] && f.annotations['rightEyeIris'][0]) {
          ctx.strokeStyle = 'pink';
          ctx.fillStyle = 'pink';
          const leftGaze = [
            f.annotations['leftEyeIris'][0][0] + (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]),
            f.annotations['leftEyeIris'][0][1] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]),
          ];
          arrow(ctx, [f.annotations['leftEyeIris'][0][0], f.annotations['leftEyeIris'][0][1]], [leftGaze[0], leftGaze[1]], 4);
          const rightGaze = [
            f.annotations['rightEyeIris'][0][0] + (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]),
            f.annotations['rightEyeIris'][0][1] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]),
          ];
          arrow(ctx, [f.annotations['rightEyeIris'][0][0], f.annotations['rightEyeIris'][0][1]], [rightGaze[0], rightGaze[1]], 4);
        }
      }
    }
  }
}

34
src/draw/gesture.ts Normal file
View File

@ -0,0 +1,34 @@
import { mergeDeep } from '../util/util';
import { getCanvasContext } from './primitives';
import { options } from './options';
import type { GestureResult } from '../result';
import type { AnyCanvas, DrawOptions } from '../exports';
/** Draw detected gestures as a stacked list of text labels in the canvas top-left corner. */
export async function gesture(inCanvas: AnyCanvas, result: Array<GestureResult>, drawOptions?: Partial<DrawOptions>) {
  const localOptions = mergeDeep(options, drawOptions);
  if (!result || !inCanvas) return;
  if (!localOptions.drawGestures) return;
  const ctx = getCanvasContext(inCanvas);
  if (!ctx) return;
  ctx.font = localOptions.font;
  ctx.fillStyle = localOptions.color;
  let row = 1; // current label row, advanced only when a label is actually drawn
  for (const res of result) {
    // each gesture result is a record with two entries: [target, id] and [kind, gesture-text]
    const [where, what] = Object.entries(res);
    if ((what.length > 1) && ((what[1] as string).length > 0)) {
      const who = (where[1] as number) > 0 ? `#${where[1]}` : '';
      const label = `${where[0]} ${who}: ${what[1]}`;
      if (localOptions.shadowColor && localOptions.shadowColor !== '') {
        ctx.fillStyle = localOptions.shadowColor;
        ctx.fillText(label, 8, 2 + (row * localOptions.lineHeight));
      }
      ctx.fillStyle = localOptions.labelColor;
      ctx.fillText(label, 6, 0 + (row * localOptions.lineHeight));
      row += 1;
    }
  }
}

74
src/draw/hand.ts Normal file
View File

@ -0,0 +1,74 @@
import { mergeDeep } from '../util/util';
import { getCanvasContext, rect, point, colorDepth } from './primitives';
import { options } from './options';
import type { HandResult } from '../result';
import type { AnyCanvas, DrawOptions, Point } from '../exports';
/** Draw detected hands: bounding box, keypoints, per-finger labels and finger skeleton lines. */
export async function hand(inCanvas: AnyCanvas, result: Array<HandResult>, drawOptions?: Partial<DrawOptions>) {
  const localOptions = mergeDeep(options, drawOptions);
  if (!result || !inCanvas) return;
  const ctx = getCanvasContext(inCanvas);
  if (!ctx) return;
  ctx.lineJoin = 'round';
  ctx.font = localOptions.font;
  for (const h of result) {
    if (localOptions.drawBoxes) {
      const label = `hand:${Math.trunc(100 * h.score)}%`; // can use h.label
      ctx.strokeStyle = localOptions.color;
      ctx.fillStyle = localOptions.color;
      rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
      if (localOptions.drawLabels) {
        if (localOptions.shadowColor && localOptions.shadowColor !== '') {
          ctx.fillStyle = localOptions.shadowColor;
          ctx.fillText(label, h.box[0] + 3, 1 + h.box[1] + localOptions.lineHeight, h.box[2]);
        }
        ctx.fillStyle = localOptions.labelColor;
        ctx.fillText(label, h.box[0] + 2, 0 + h.box[1] + localOptions.lineHeight, h.box[2]);
      }
      ctx.stroke();
    }
    if (localOptions.drawPoints && h.keypoints && h.keypoints.length > 0) {
      for (const pt of h.keypoints) {
        ctx.fillStyle = localOptions.useDepth ? colorDepth(pt[2] || 0) : localOptions.color;
        point(ctx, pt[0], pt[1], 0, localOptions);
      }
    }
    if (localOptions.drawLabels && h.annotations) {
      // label each finger at its last (tip) keypoint
      const addHandLabel = (part: Array<Point>, title: string) => {
        if (!part || part.length === 0 || !part[0]) return;
        const tip = part[part.length - 1];
        ctx.fillStyle = localOptions.useDepth ? colorDepth(tip[2] || 0) : localOptions.color;
        ctx.fillText(title, tip[0] + 4, tip[1] + 4);
      };
      ctx.font = localOptions.font;
      for (const name of ['index', 'middle', 'ring', 'pinky', 'thumb', 'palm']) addHandLabel(h.annotations[name], name);
    }
    if (localOptions.drawPolygons && h.annotations) {
      // connect consecutive keypoints of a finger with individually-styled segments
      const addHandLine = (part: Array<Point>) => {
        if (!part || part.length === 0 || !part[0]) return;
        for (let i = 0; i < part.length; i++) {
          ctx.beginPath();
          ctx.strokeStyle = localOptions.useDepth ? colorDepth(i * (part[i][2] || 0)) : localOptions.color;
          ctx.moveTo(part[i > 0 ? i - 1 : 0][0], part[i > 0 ? i - 1 : 0][1]);
          ctx.lineTo(part[i][0], part[i][1]);
          ctx.stroke();
        }
      };
      ctx.lineWidth = localOptions.lineWidth;
      for (const name of ['index', 'middle', 'ring', 'pinky', 'thumb']) addHandLine(h.annotations[name]);
      // addPart(h.annotations.palm);
    }
  }
}

32
src/draw/object.ts Normal file
View File

@ -0,0 +1,32 @@
import { mergeDeep } from '../util/util';
import { getCanvasContext, rect } from './primitives';
import { options } from './options';
import type { ObjectResult } from '../result';
import type { AnyCanvas, DrawOptions } from '../exports';
/** Draw detected objects: bounding box plus class label with detection score. */
export async function object(inCanvas: AnyCanvas, result: Array<ObjectResult>, drawOptions?: Partial<DrawOptions>) {
  const localOptions = mergeDeep(options, drawOptions);
  if (!result || !inCanvas) return;
  const ctx = getCanvasContext(inCanvas);
  if (!ctx) return;
  ctx.lineJoin = 'round';
  ctx.font = localOptions.font;
  for (const obj of result) {
    if (!localOptions.drawBoxes) continue;
    const [x, y, w, h] = obj.box;
    ctx.strokeStyle = localOptions.color;
    ctx.fillStyle = localOptions.color;
    rect(ctx, x, y, w, h, localOptions);
    if (localOptions.drawLabels) {
      const label = `${obj.label} ${Math.round(100 * obj.score)}%`;
      if (localOptions.shadowColor && localOptions.shadowColor !== '') {
        // draw label shadow first, offset by one pixel
        ctx.fillStyle = localOptions.shadowColor;
        ctx.fillText(label, x + 3, 1 + y + localOptions.lineHeight, w);
      }
      ctx.fillStyle = localOptions.labelColor;
      ctx.fillText(label, x + 2, 0 + y + localOptions.lineHeight, w);
    }
    ctx.stroke();
  }
}

66
src/draw/options.ts Normal file
View File

@ -0,0 +1,66 @@
/** Draw Options
 * - Accessed via `human.draw.options` or provided per each draw method as the drawOptions optional parameter
 */
export type DrawOptions = {
  /** draw line color */
  color: string,
  /** alpha value used for lines */
  alpha: number,
  /** label color */
  labelColor: string,
  /** label shadow color */
  shadowColor: string,
  /** label font */
  font: string,
  /** line spacing between labels */
  lineHeight: number,
  /** line width for drawn lines */
  lineWidth: number,
  /** size of drawn points */
  pointSize: number,
  /** draw rounded boxes by n pixels */
  roundRect: number,
  /** should points be drawn? */
  drawPoints: boolean,
  /** should labels be drawn? */
  drawLabels: boolean,
  /** should face attention keypoints be highlighted? */
  drawAttention: boolean, // fix: comma terminator for consistency with all other members
  /** should detected gestures be drawn? */
  drawGestures: boolean,
  /** should draw boxes around detection results? */
  drawBoxes: boolean,
  /** should draw polygons from detection points? */
  drawPolygons: boolean,
  /** should draw gaze arrows? */
  drawGaze: boolean,
  /** should fill polygons? */
  fillPolygons: boolean,
  /** use z-coordinate when available */
  useDepth: boolean,
  /** should lines be curved? */
  useCurves: boolean,
}

/** currently set draw options {@link DrawOptions} */
// note: redundant `<string>`/`<number>`/`<boolean>` casts removed — the const is already typed DrawOptions
export const options: DrawOptions = {
  color: 'rgba(173, 216, 230, 0.6)', // 'lightblue' with light alpha channel
  labelColor: 'rgba(173, 216, 230, 1)', // 'lightblue' with dark alpha channel
  shadowColor: 'black',
  alpha: 0.5,
  font: 'small-caps 16px "Segoe UI"',
  lineHeight: 18,
  lineWidth: 4,
  pointSize: 2,
  roundRect: 8,
  drawPoints: false,
  drawLabels: true,
  drawBoxes: true,
  drawAttention: true,
  drawGestures: true,
  drawPolygons: true,
  drawGaze: true,
  fillPolygons: false,
  useDepth: true,
  useCurves: false,
};

115
src/draw/primitives.ts Normal file
View File

@ -0,0 +1,115 @@
import { log } from '../util/util';
import type { AnyCanvas } from '../exports';
import type { Point } from '../result';
import { options, DrawOptions } from './options';
/** Validate a canvas and return its 2d rendering context; logs the specific failure and returns null otherwise. */
export const getCanvasContext = (input: AnyCanvas) => {
  if (!input) {
    log('draw error: invalid canvas');
    return null;
  }
  if (!input.getContext) {
    log('draw error: canvas context not defined');
    return null;
  }
  const ctx = input.getContext('2d');
  if (!ctx) {
    log('draw error: cannot get canvas context');
    return null;
  }
  return ctx;
};
/** Convert radians to whole degrees. */
export const rad2deg = (theta: number) => Math.round(theta * 180 / Math.PI);
/** Map a z-coordinate to an rgba color string; rgb flags select which channels vary with depth. */
export const colorDepth = (z: number, rgb: [boolean, boolean, boolean] = [true, true, false]): string => {
  const delta = Math.trunc(3 * z); // depth offset applied around mid-gray (127)
  const r = rgb[0] ? 127 + delta : 255;
  const g = rgb[1] ? 127 - delta : 255;
  const b = rgb[2] ? 127 - delta : 255;
  return `rgba(${r}, ${g}, ${b}, ${options.alpha})`;
};
/** Draw a single filled point; z drives the depth tint, with z === -255 reserved as the attention-keypoint marker. */
export function point(ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D, x: number, y: number, z: number | undefined, localOptions: DrawOptions) {
  const depth = z || 0;
  const channels: [boolean, boolean, boolean] = depth === -255 ? [true, false, true] : [true, false, false];
  ctx.fillStyle = (localOptions.useDepth && depth) ? colorDepth(depth, channels) : localOptions.color;
  ctx.beginPath();
  ctx.arc(x, y, localOptions.pointSize, 0, 2 * Math.PI);
  ctx.fill();
}
/** Draw a rectangle outline: a rounded rect by default, or an ellipse when useCurves is set. */
export function rect(ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D, x: number, y: number, width: number, height: number, localOptions: DrawOptions) {
  ctx.beginPath();
  ctx.lineWidth = localOptions.lineWidth;
  if (localOptions.useCurves) {
    // draw as an ellipse inscribed in the box
    ctx.ellipse((x + x + width) / 2, (y + y + height) / 2, width / 2, height / 2, 0, 0, 2 * Math.PI);
  } else {
    // rounded rectangle: straight edges joined by quadratic corner curves of radius r
    const r = localOptions.roundRect;
    ctx.moveTo(x + r, y);
    ctx.lineTo(x + width - r, y);
    ctx.quadraticCurveTo(x + width, y, x + width, y + r);
    ctx.lineTo(x + width, y + height - r);
    ctx.quadraticCurveTo(x + width, y + height, x + width - r, y + height);
    ctx.lineTo(x + r, y + height);
    ctx.quadraticCurveTo(x, y + height, x, y + height - r);
    ctx.lineTo(x, y + r);
    ctx.quadraticCurveTo(x, y, x + r, y);
    ctx.closePath();
  }
  ctx.stroke();
}
/** Draw a connected polyline through the given points, tinting segments by z-depth when enabled. */
export function lines(ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D, points: Point[], localOptions: DrawOptions) {
  if (points.length < 2) return; // need at least two points to draw a line
  ctx.beginPath();
  ctx.moveTo(points[0][0], points[0][1]);
  for (const pt of points) {
    const z = pt[2] || 0;
    const style = (localOptions.useDepth && z !== 0) ? colorDepth(z) : localOptions.color;
    ctx.strokeStyle = style;
    ctx.fillStyle = style;
    ctx.lineTo(pt[0], Math.round(pt[1]));
  }
  ctx.stroke();
  if (localOptions.fillPolygons) {
    ctx.closePath();
    ctx.fill();
  }
}
/** Draw a smooth quadratic curve through the points; falls back to straight lines when curves are disabled or fewer than 3 points. */
export function curves(ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D, points: Point[], localOptions: DrawOptions) {
  if (points.length < 2) return; // nothing to draw
  ctx.lineWidth = localOptions.lineWidth;
  if (!localOptions.useCurves || points.length <= 2) {
    lines(ctx, points, localOptions);
    return;
  }
  ctx.beginPath(); // fix: start a fresh path so stroke() does not re-stroke whatever path was left on the context
  ctx.moveTo(points[0][0], points[0][1]);
  // midpoint-based quadratic smoothing between consecutive points
  for (let i = 0; i < points.length - 2; i++) {
    const xc = (points[i][0] + points[i + 1][0]) / 2;
    const yc = (points[i][1] + points[i + 1][1]) / 2;
    ctx.quadraticCurveTo(points[i][0], points[i][1], xc, yc);
  }
  ctx.quadraticCurveTo(points[points.length - 2][0], points[points.length - 2][1], points[points.length - 1][0], points[points.length - 1][1]);
  ctx.stroke();
  if (localOptions.fillPolygons) {
    ctx.closePath();
    ctx.fill();
  }
}
/** Draw a line from `from` to `to` ending in a filled triangular arrowhead of the given radius. */
export function arrow(ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D, from: Point, to: Point, radius = 5) {
  ctx.beginPath();
  ctx.moveTo(from[0], from[1]);
  ctx.lineTo(to[0], to[1]);
  // arrowhead: equilateral triangle centered on `to`, first vertex aligned with the line direction
  let angle = Math.atan2(to[1] - from[1], to[0] - from[0]);
  ctx.moveTo(radius * Math.cos(angle) + to[0], radius * Math.sin(angle) + to[1]);
  for (let i = 0; i < 2; i++) {
    angle += (2 * Math.PI) / 3; // rotate 120° to the next vertex
    ctx.lineTo(radius * Math.cos(angle) + to[0], radius * Math.sin(angle) + to[1]);
  }
  ctx.closePath();
  ctx.stroke();
  ctx.fill();
}

View File

@ -9,7 +9,7 @@ export * from './result';
/* Explicit reexport of main @tensorflow/tfjs types */
export type { Tensor, TensorLike, GraphModel, Rank } from './tfjs/types';
export type { DrawOptions } from './util/draw';
export type { DrawOptions } from './draw/options';
export type { Descriptor } from './face/match';
export type { Box, Point } from './result';
export type { Models } from './models';

19
src/face/attention.ts Normal file
View File

@ -0,0 +1,19 @@
import type { Tensor } from '../tfjs/types';
/**
 * Appends attention-model keypoints (lips, eyes, irises) as 2d points with z=0 to `rawCoords`.
 * Tensor indices map to specific model outputs; results[4] (face flag) and results[2] (mesh)
 * are handled by the caller and intentionally skipped here.
 */
export async function augment(rawCoords, results: Tensor[]) {
  // [result index, label] pairs in the order their points are appended to the mesh
  const outputs: Array<[number, string]> = [
    [5, 'lips'], // 80 x 2d // output_lips
    [0, 'eyeL'], // 71 x 2d // output_left_eye
    [6, 'eyeR'], // 71 x 2d // output_right_eye
    [3, 'irisL'], // 5 x 2d // output_left_iris
    [1, 'irisR'], // 5 x 2d // output_right_iris
  ];
  for (const [idx] of outputs) {
    const data = results[idx].dataSync() as Float32Array;
    for (let i = 0; i < data.length; i += 2) rawCoords.push([data[i + 0], data[i + 1], 0]);
  }
  return rawCoords;
}

View File

@ -14,7 +14,7 @@ import { env } from '../util/env';
import type { Point } from '../result';
const keypointsCount = 6;
const faceBoxScaleFactor = 1.2;
const faceBoxScaleFactor = 1.4;
let model: GraphModel | null;
let anchors: Tensor | null = null;
let inputSize = 0;

View File

@ -14,6 +14,7 @@ import * as blazeface from './blazeface';
import * as util from './facemeshutil';
import * as coords from './facemeshcoords';
import * as iris from './iris';
import * as attention from './attention';
import { histogramEqualization } from '../image/enhance';
import { env } from '../util/env';
import type { GraphModel, Tensor } from '../tfjs/types';
@ -78,20 +79,27 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult
((box.startPoint[1] + box.endPoint[1])) / 2 + ((box.endPoint[1] + box.startPoint[1]) * pt[1] / blazeface.size()),
]);
face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize]);
for (const key of Object.keys(coords.blazeFaceLandmarks)) face.annotations[key] = [face.mesh[coords.blazeFaceLandmarks[key] as number]]; // add annotations
for (const key of Object.keys(coords.blazeFaceLandmarks)) {
face.annotations[key] = [face.mesh[coords.blazeFaceLandmarks[key] as number]]; // add annotations
}
} else if (!model) { // mesh enabled, but not loaded
if (config.debug) log('face mesh detection requested, but model is not loaded');
} else { // mesh enabled
const [contours, confidence, contourCoords] = model.execute(face.tensor as Tensor) as Array<Tensor>; // first returned tensor represents facial contours which are already included in the coordinates.
const results = model.execute(face.tensor as Tensor) as Array<Tensor>;
const confidence = results.find((t) => t.shape[t.shape.length - 1] === 1) as Tensor;
const contourCoords = results.find((t) => t.shape[t.shape.length - 1] === 1404) as Tensor;
const faceConfidence = await confidence.data();
face.faceScore = Math.round(100 * faceConfidence[0]) / 100;
const coordsReshaped = tf.reshape(contourCoords, [-1, 3]);
let rawCoords = await coordsReshaped.array();
tf.dispose([contourCoords, coordsReshaped, confidence, contours]);
if (face.faceScore < (config.face.detector?.minConfidence || 1)) { // low confidence in detected mesh
box.confidence = face.faceScore; // reset confidence of cached box
} else {
if (config.face.iris?.enabled) rawCoords = await iris.augmentIris(rawCoords, face.tensor, config, inputSize); // augment results with iris
if (config.face.attention?.enabled) {
rawCoords = await attention.augment(rawCoords, results); // augment iris results using attention model results
} else if (config.face.iris?.enabled) {
rawCoords = await iris.augmentIris(rawCoords, face.tensor, config, inputSize); // run iris model and augment results
}
face.mesh = util.transformRawCoords(rawCoords, box, angle, rotationMatrix, inputSize); // get processed mesh
face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize]);
for (const key of Object.keys(coords.meshAnnotations)) face.annotations[key] = coords.meshAnnotations[key].map((index) => face.mesh[index]); // add annotations
@ -101,6 +109,7 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult
face.boxRaw = util.getRawBox(calculatedBox, input);
newCache.push(calculatedBox);
}
tf.dispose([...results, coordsReshaped]);
}
if (face.score > (config.face.detector?.minConfidence || 1)) faces.push(face);
else tf.dispose(face.tensor);
@ -111,8 +120,12 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult
export async function load(config: Config): Promise<GraphModel> {
if (env.initial) model = null;
if (!model) model = await loadModel(config.face.mesh?.modelPath);
else if (config.debug) log('cached model:', model['modelUrl']);
if (!model) {
if (config.face.attention?.enabled) model = await loadModel(config.face.attention?.modelPath);
else model = await loadModel(config.face.mesh?.modelPath);
} else if (config.debug) {
log('cached model:', model['modelUrl']);
}
inputSize = model.inputs[0].shape ? model.inputs[0].shape[2] : 0;
return model;
}

View File

@ -129,7 +129,7 @@ export async function augmentIris(rawCoords, face, config, meshSize) {
const leftEyeData = eyePredictionsData.slice(0, irisLandmarks.numCoordinates * 3);
const { rawCoords: leftEyeRawCoords, iris: leftIrisRawCoords } = getEyeCoords(leftEyeData, leftEyeBox, leftEyeBoxSize, true);
const rightEyeData = eyePredictionsData.slice(irisLandmarks.numCoordinates * 3);
const { rawCoords: rightEyeRawCoords, iris: rightIrisRawCoords } = getEyeCoords(rightEyeData, rightEyeBox, rightEyeBoxSize);
const { rawCoords: rightEyeRawCoords, iris: rightIrisRawCoords } = getEyeCoords(rightEyeData, rightEyeBox, rightEyeBoxSize, false);
const leftToRightEyeDepthDifference = getLeftToRightEyeDepthDifference(rawCoords);
if (Math.abs(leftToRightEyeDepthDifference) < 30) { // User is looking straight ahead.
replaceRawCoordinates(rawCoords, leftEyeRawCoords, 'left', null);

View File

@ -17,7 +17,7 @@ import * as app from '../package.json';
import * as backend from './tfjs/backend';
import * as blazepose from './body/blazepose';
import * as centernet from './object/centernet';
import * as draw from './util/draw';
import * as draw from './draw/draw';
import * as efficientpose from './body/efficientpose';
import * as face from './face/face';
import * as facemesh from './face/facemesh';

View File

@ -159,25 +159,29 @@ export async function process(input: Input, config: Config, getTensor: boolean =
env.filter = !!fx;
if (!fx || !fx.add) {
if (config.debug) log('input process error: cannot initialize filters');
return { tensor: null, canvas: inCanvas };
env.webgl.supported = false;
config.filter.enabled = false;
copy(inCanvas, outCanvas); // filter failed to initialize
// return { tensor: null, canvas: inCanvas };
} else {
fx.reset();
if (config.filter.brightness !== 0) fx.add('brightness', config.filter.brightness);
if (config.filter.contrast !== 0) fx.add('contrast', config.filter.contrast);
if (config.filter.sharpness !== 0) fx.add('sharpen', config.filter.sharpness);
if (config.filter.blur !== 0) fx.add('blur', config.filter.blur);
if (config.filter.saturation !== 0) fx.add('saturation', config.filter.saturation);
if (config.filter.hue !== 0) fx.add('hue', config.filter.hue);
if (config.filter.negative) fx.add('negative');
if (config.filter.sepia) fx.add('sepia');
if (config.filter.vintage) fx.add('brownie');
if (config.filter.sepia) fx.add('sepia');
if (config.filter.kodachrome) fx.add('kodachrome');
if (config.filter.technicolor) fx.add('technicolor');
if (config.filter.polaroid) fx.add('polaroid');
if (config.filter.pixelate !== 0) fx.add('pixelate', config.filter.pixelate);
if (fx.get() > 0) outCanvas = fx.apply(inCanvas);
else outCanvas = fx.draw(inCanvas);
}
fx.reset();
if (config.filter.brightness !== 0) fx.add('brightness', config.filter.brightness);
if (config.filter.contrast !== 0) fx.add('contrast', config.filter.contrast);
if (config.filter.sharpness !== 0) fx.add('sharpen', config.filter.sharpness);
if (config.filter.blur !== 0) fx.add('blur', config.filter.blur);
if (config.filter.saturation !== 0) fx.add('saturation', config.filter.saturation);
if (config.filter.hue !== 0) fx.add('hue', config.filter.hue);
if (config.filter.negative) fx.add('negative');
if (config.filter.sepia) fx.add('sepia');
if (config.filter.vintage) fx.add('brownie');
if (config.filter.sepia) fx.add('sepia');
if (config.filter.kodachrome) fx.add('kodachrome');
if (config.filter.technicolor) fx.add('technicolor');
if (config.filter.polaroid) fx.add('polaroid');
if (config.filter.pixelate !== 0) fx.add('pixelate', config.filter.pixelate);
if (fx.get() > 0) outCanvas = fx.apply(inCanvas);
else outCanvas = fx.draw(inCanvas);
} else {
copy(inCanvas, outCanvas); // if no filters applied, output canvas is input canvas
if (fx) fx = null;

View File

@ -84,12 +84,12 @@ export function GLImageFilter() {
const shaderProgramCache = { }; // key is the shader program source, value is the compiled program
const DRAW = { INTERMEDIATE: 1 };
const gl = fxcanvas.getContext('webgl') as WebGLRenderingContext;
// @ts-ignore used for sanity checks outside of imagefx
this.gl = gl;
if (!gl) {
log('filter: cannot get webgl context');
return;
}
// @ts-ignore used for sanity checks outside of imagefx
this.gl = gl;
function resize(width, height) {
if (width === fxcanvas.width && height === fxcanvas.height) return; // Same width/height? Nothing to do here

View File

@ -1,24 +1,24 @@
2022-04-10 10:11:37 INFO:  Application: {"name":"@vladmandic/human","version":"2.6.5"}
2022-04-10 10:11:37 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
2022-04-10 10:11:37 INFO:  Toolchain: {"build":"0.7.2","esbuild":"0.14.34","typescript":"4.6.3","typedoc":"0.22.14","eslint":"8.13.0"}
2022-04-10 10:11:37 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
2022-04-10 10:11:37 STATE: Clean: {"locations":["dist/*","types/lib/*","typedoc/*"]}
2022-04-10 10:11:37 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":595}
2022-04-10 10:11:37 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":64,"inputBytes":564552,"outputBytes":293314}
2022-04-10 10:11:37 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":599}
2022-04-10 10:11:37 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":64,"inputBytes":564556,"outputBytes":293318}
2022-04-10 10:11:37 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":651}
2022-04-10 10:11:37 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":64,"inputBytes":564608,"outputBytes":293368}
2022-04-10 10:11:37 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":394}
2022-04-10 10:11:37 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1068,"outputBytes":615}
2022-04-10 10:11:37 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":64,"inputBytes":564572,"outputBytes":292273}
2022-04-10 10:11:37 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1344714}
2022-04-10 10:11:37 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":64,"inputBytes":1908671,"outputBytes":1636003}
2022-04-10 10:11:37 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":64,"inputBytes":1908671,"outputBytes":2113825}
2022-04-10 10:11:43 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":112}
2022-04-10 10:11:45 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":72,"generated":true}
2022-04-10 10:11:45 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5862,"outputBytes":2915}
2022-04-10 10:11:45 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":15174,"outputBytes":7820}
2022-04-10 10:11:54 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":96,"errors":0,"warnings":0}
2022-04-10 10:11:54 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
2022-04-10 10:11:54 INFO:  Done...
2022-04-11 11:43:57 INFO:  Application: {"name":"@vladmandic/human","version":"2.7.0"}
2022-04-11 11:43:57 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
2022-04-11 11:43:57 INFO:  Toolchain: {"build":"0.7.2","esbuild":"0.14.34","typescript":"4.6.3","typedoc":"0.22.15","eslint":"8.13.0"}
2022-04-11 11:43:57 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
2022-04-11 11:43:57 STATE: Clean: {"locations":["dist/*","types/lib/*","typedoc/*"]}
2022-04-11 11:43:57 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":595}
2022-04-11 11:43:57 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":72,"inputBytes":568981,"outputBytes":294190}
2022-04-11 11:43:57 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":599}
2022-04-11 11:43:57 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":72,"inputBytes":568985,"outputBytes":294194}
2022-04-11 11:43:57 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":651}
2022-04-11 11:43:57 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":72,"inputBytes":569037,"outputBytes":294244}
2022-04-11 11:43:57 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":394}
2022-04-11 11:43:57 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1068,"outputBytes":615}
2022-04-11 11:43:57 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":72,"inputBytes":569001,"outputBytes":293151}
2022-04-11 11:43:57 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1344714}
2022-04-11 11:43:57 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":72,"inputBytes":1913100,"outputBytes":1636880}
2022-04-11 11:43:58 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":72,"inputBytes":1913100,"outputBytes":2115805}
2022-04-11 11:44:03 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":114}
2022-04-11 11:44:05 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":73,"generated":true}
2022-04-11 11:44:05 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5916,"outputBytes":2957}
2022-04-11 11:44:05 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":15174,"outputBytes":7820}
2022-04-11 11:44:13 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":104,"errors":0,"warnings":0}
2022-04-11 11:44:13 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
2022-04-11 11:44:13 INFO:  Done...