update faceid

Vladimir Mandic 2022-09-25 10:15:47 -04:00
parent 2666181b3b
commit 77194de344
11 changed files with 172 additions and 117 deletions

View File

@ -9,8 +9,9 @@
## Changelog
### **HEAD -> main** 2022/09/21 mandic00@live.com
### **HEAD -> main** 2022/09/25 mandic00@live.com
- create funding.yml
- fix rotation interpolation
### **2.10.3** 2022/09/21 mandic00@live.com

View File

@ -170,9 +170,15 @@ and optionally matches detected face with database of known people to guess thei
![Face Matching](assets/screenshot-facematch.jpg)
2. **Face ID:**
Performs validation check on a webcam input to detect a real face and matches it to known faces stored in database
> [demo/faceid](demo/faceid/index.html)
![Face ID](assets/screenshot-faceid.jpg)
<br>
2. **3D Rendering:**
3. **3D Rendering:**
> [human-motion](https://github.com/vladmandic/human-motion)
![Face3D](https://github.com/vladmandic/human-motion/raw/main/assets/screenshot-face.jpg)
@ -181,14 +187,14 @@ and optionally matches detected face with database of known people to guess thei
<br>
3. **VR Model Tracking:**
4. **VR Model Tracking:**
> [human-three-vrm](https://github.com/vladmandic/human-three-vrm)
> [human-bjs-vrm](https://github.com/vladmandic/human-bjs-vrm)
![ThreeVRM](https://github.com/vladmandic/human-three-vrm/raw/main/assets/human-vrm-screenshot.jpg)
4. **Human as OS native application:**
5. **Human as OS native application:**
> [human-electron](https://github.com/vladmandic/human-electron)
<br>

View File

@ -50,5 +50,6 @@ Enable via `about:config` -> `gfx.offscreencanvas.enabled`
- Enable model cache when using web workers
- Fix for `face.rotation` interpolation
- Improve NodeJS resolver when using ESM
- Update demo `demo/faceid`
- Update demo `demo/nodejs/process-folder.js`
and re-process `/samples`

Binary file not shown.

After

Width:  |  Height:  |  Size: 47 KiB

View File

@ -19,14 +19,23 @@
body { margin: 0; padding: 16px; background: black; color: white; overflow-x: hidden; width: 100vw; height: 100vh; }
body::-webkit-scrollbar { display: none; }
.button { padding: 2px; cursor: pointer; box-shadow: 2px 2px black; width: 64px; text-align: center; place-content: center; margin-left: 16px; height: 16px; display: none }
.ok { position: absolute; top: 64px; right: 20px; width: 100px; background-color: grey; padding: 4px; color: black; font-size: 14px }
.ok { position: absolute; top: 64px; right: 20px; width: 150px; background-color: grey; padding: 4px; color: black; font-size: 14px }
</style>
</head>
<body>
<div style="padding: 8px">
<h1 style="margin: 0">faceid demo using human library</h1>
look directly at camera and make sure that detection passes all of the required tests noted on the right hand side of the screen<br>
if the input does not satisfy the tests within the specified timeout, no image will be selected<br>
once face image is approved, it will be compared with existing face database<br>
you can also store a face descriptor with a label in the browser's IndexedDB for future use<br>
<br>
<i>note: this is not equivalent to full face-id methods as used by modern mobile phones or Windows Hello,<br>
as those rely on additional infrared and depth sensors, not just the camera image, for additional levels of security</i>
</div>
<canvas id="canvas" style="padding: 8px"></canvas>
<canvas id="source" style="padding: 8px"></canvas>
<video id="video" playsinline style="display: none"></video>
<pre id="fps" style="position: absolute; bottom: 16px; right: 20px; background-color: grey; padding: 8px; box-shadow: 2px 2px black"></pre>
<pre id="log" style="padding: 8px"></pre>
<div id="match" style="display: none; padding: 8px">
<label for="name">name:</label>
@ -34,7 +43,7 @@
<span id="save" class="button" style="background-color: royalblue">save</span>
<span id="delete" class="button" style="background-color: lightcoral">delete</span>
</div>
<div id="retry" class="button" style="background-color: darkslategray; width: 350px; margin-top: 32px; padding: 4px">retry</div>
<div id="retry" class="button" style="background-color: darkslategray; width: 93%; margin-top: 32px; padding: 12px">retry</div>
<div id="ok"></div>
</body>
</html>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -11,6 +11,7 @@ import * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human
import * as indexDb from './indexdb'; // methods to deal with indexdb
const humanConfig = { // user configuration for human, used to fine-tune behavior
cacheSensitivity: 0,
modelBasePath: '../../models',
filter: { equalization: true }, // lets run with histogram equilizer
face: {
@ -36,7 +37,7 @@ const matchOptions = { order: 2, multiplier: 25, min: 0.2, max: 0.8 }; // for fa
const options = {
minConfidence: 0.6, // overal face confidence for box, face, gender, real, live
minSize: 224, // min input to face descriptor model before degradation
maxTime: 10000, // max time before giving up
maxTime: 30000, // max time before giving up
blinkMin: 10, // minimum duration of a valid blink
blinkMax: 800, // maximum duration of a valid blink
threshold: 0.5, // minimum similarity
@ -46,18 +47,36 @@ const options = {
...matchOptions,
};
const ok = { // must meet all rules
faceCount: false,
faceConfidence: false,
facingCenter: false,
lookingCenter: false,
blinkDetected: false,
faceSize: false,
antispoofCheck: false,
livenessCheck: false,
elapsedMs: 0, // total time while waiting for valid face
// Validation state table: one entry per rule a live face must satisfy before capture.
// `status` is the pass/fail flag (undefined for purely informational entries) and
// `val` is the last measured value, both rendered in the on-screen overlay.
const ok: Record<string, { status: boolean | undefined, val: number }> = { // must meet all rules
  faceCount: { status: false, val: 0 }, // exactly one face must be detected
  faceConfidence: { status: false, val: 0 }, // face/box score must reach options.minConfidence
  facingCenter: { status: false, val: 0 }, // 'facing center' gesture must be present
  lookingCenter: { status: false, val: 0 }, // 'looking center' gesture must be present
  blinkDetected: { status: false, val: 0 }, // a blink between options.blinkMin and options.blinkMax ms
  faceSize: { status: false, val: 0 }, // smaller box dimension must reach options.minSize
  antispoofCheck: { status: false, val: 0 }, // face[0].real score must reach options.minConfidence
  livenessCheck: { status: false, val: 0 }, // face[0].live score must reach options.minConfidence
  age: { status: false, val: 0 }, // age estimate must be present (> 0)
  gender: { status: false, val: 0 }, // genderScore must reach options.minConfidence
  timeout: { status: true, val: 0 }, // flips to false once elapsed time exceeds options.maxTime
  descriptor: { status: false, val: 0 }, // face embedding must be non-empty
  elapsedMs: { status: undefined, val: 0 }, // total time while waiting for valid face
  detectFPS: { status: undefined, val: 0 }, // mark detection fps performance
  drawFPS: { status: undefined, val: 0 }, // mark redraw fps performance
};
const allOk = () => ok.faceCount && ok.faceSize && ok.blinkDetected && ok.facingCenter && ok.lookingCenter && ok.faceConfidence && ok.antispoofCheck && ok.livenessCheck;
// True only when every gating rule currently holds; informational entries
// (timeout, elapsedMs, fps counters) deliberately do not participate.
const allOk = () => [
  ok.faceCount,
  ok.faceSize,
  ok.blinkDetected,
  ok.facingCenter,
  ok.lookingCenter,
  ok.faceConfidence,
  ok.antispoofCheck,
  ok.livenessCheck,
  ok.descriptor,
  ok.age,
  ok.gender,
].every((rule) => rule.status);
const current: { face: H.FaceResult | null, record: indexDb.FaceRecord | null } = { face: null, record: null }; // current face record and matched database record
const blink = { // internal timers for blink start/end/duration
@ -87,17 +106,14 @@ const dom = { // grab instances of dom objects so we dont have to look them up l
ok: document.getElementById('ok') as HTMLDivElement,
};
const timestamp = { detect: 0, draw: 0 }; // holds information used to calculate performance and possible memory leaks
const fps = { detect: 0, draw: 0 }; // holds calculated fps information for both detect and screen refresh
let startTime = 0;
const log = (...msg) => { // helper method to output messages
  // mirror each message to both the on-page <pre id="log"> element and the browser console
  dom.log.innerText += msg.join(' ') + '\n';
  console.log(...msg); // eslint-disable-line no-console
};
const printFPS = (msg) => dom.fps.innerText = msg; // print status element
async function webCam() { // initialize webcam
printFPS('starting webcam...');
// @ts-ignore resizeMode is not yet defined in tslib
const cameraOptions: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } };
const stream: MediaStream = await navigator.mediaDevices.getUserMedia(cameraOptions);
@ -107,6 +123,8 @@ async function webCam() { // initialize webcam
await ready;
dom.canvas.width = dom.video.videoWidth;
dom.canvas.height = dom.video.videoHeight;
dom.canvas.style.width = '50%';
dom.canvas.style.height = '50%';
if (human.env.initial) log('video:', dom.video.videoWidth, dom.video.videoHeight, '|', stream.getVideoTracks()[0].label);
dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click
if (dom.video.paused) void dom.video.play();
@ -119,58 +137,71 @@ async function detectionLoop() { // main detection loop
if (current.face?.tensor) human.tf.dispose(current.face.tensor); // dispose previous tensor
await human.detect(dom.video); // actual detection; were not capturing output in a local variable as it can also be reached via human.result
const now = human.now();
fps.detect = 1000 / (now - timestamp.detect);
ok.detectFPS.val = Math.round(10000 / (now - timestamp.detect)) / 10;
timestamp.detect = now;
requestAnimationFrame(detectionLoop); // start new frame immediately
}
}
// Renders one overlay line per entry in the `ok` table, reusing the
// per-rule dom nodes across calls so repeated refreshes stay cheap.
function drawValidationTests() {
  Object.entries(ok).forEach(([rule, state], index) => {
    const offset = 32 + index * 28; // vertical position of this status line
    let node = document.getElementById(`ok-${rule}`);
    if (!node) { // first call for this rule: create and attach the element
      node = document.createElement('div');
      node.id = `ok-${rule}`;
      node.innerText = rule;
      node.className = 'ok';
      node.style.top = `${offset}px`;
      dom.ok.appendChild(node);
    }
    // color-code only entries with a boolean status; informational ones stay neutral
    if (typeof state.status === 'boolean') node.style.backgroundColor = state.status ? 'lightgreen' : 'lightcoral';
    const verdict = state.status ? 'ok' : 'fail';
    node.innerText = `${rule}: ${state.val === 0 ? verdict : state.val}`;
  });
}
async function validationLoop(): Promise<H.FaceResult> { // main screen refresh loop
const interpolated = human.next(human.result); // smoothen result using last-known results
human.draw.canvas(dom.video, dom.canvas); // draw canvas to screen
await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.
const now = human.now();
fps.draw = 1000 / (now - timestamp.draw);
ok.drawFPS.val = Math.round(10000 / (now - timestamp.draw)) / 10;
timestamp.draw = now;
printFPS(`fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect | ${fps.draw.toFixed(1).padStart(5, ' ')} draw`); // write status
ok.faceCount = human.result.face.length === 1; // must be exactly detected face
if (ok.faceCount) { // skip the rest if no face
ok.faceCount.val = human.result.face.length;
ok.faceCount.status = ok.faceCount.val === 1; // must be exactly detected face
if (ok.faceCount.status) { // skip the rest if no face
const gestures: string[] = Object.values(human.result.gesture).map((gesture: H.GestureResult) => gesture.gesture); // flatten all gestures
if (gestures.includes('blink left eye') || gestures.includes('blink right eye')) blink.start = human.now(); // blink starts when eyes get closed
if (blink.start > 0 && !gestures.includes('blink left eye') && !gestures.includes('blink right eye')) blink.end = human.now(); // if blink started how long until eyes are back open
ok.blinkDetected = ok.blinkDetected || (Math.abs(blink.end - blink.start) > options.blinkMin && Math.abs(blink.end - blink.start) < options.blinkMax);
if (ok.blinkDetected && blink.time === 0) blink.time = Math.trunc(blink.end - blink.start);
ok.facingCenter = gestures.includes('facing center');
ok.lookingCenter = gestures.includes('looking center'); // must face camera and look at camera
ok.faceConfidence = (human.result.face[0].boxScore || 0) > options.minConfidence && (human.result.face[0].faceScore || 0) > options.minConfidence;
ok.antispoofCheck = (human.result.face[0].real || 0) > options.minConfidence;
ok.livenessCheck = (human.result.face[0].live || 0) > options.minConfidence;
ok.faceSize = human.result.face[0].box[2] >= options.minSize && human.result.face[0].box[3] >= options.minSize;
}
let y = 32;
for (const [key, val] of Object.entries(ok)) {
let el = document.getElementById(`ok-${key}`);
if (!el) {
el = document.createElement('div');
el.innerText = key;
el.className = 'ok';
el.style.top = `${y}px`;
dom.ok.appendChild(el);
}
if (typeof val === 'boolean') el.style.backgroundColor = val ? 'lightgreen' : 'lightcoral';
else el.innerText = `${key}:${val}`;
y += 28;
}
if (allOk()) { // all criteria met
dom.video.pause();
return human.result.face[0];
}
if (ok.elapsedMs > options.maxTime) { // give up
dom.video.pause();
return human.result.face[0];
ok.blinkDetected.status = ok.blinkDetected.status || (Math.abs(blink.end - blink.start) > options.blinkMin && Math.abs(blink.end - blink.start) < options.blinkMax);
if (ok.blinkDetected.status && blink.time === 0) blink.time = Math.trunc(blink.end - blink.start);
ok.facingCenter.status = gestures.includes('facing center');
ok.lookingCenter.status = gestures.includes('looking center'); // must face camera and look at camera
ok.faceConfidence.val = human.result.face[0].faceScore || human.result.face[0].boxScore || 0;
ok.faceConfidence.status = ok.faceConfidence.val >= options.minConfidence;
ok.antispoofCheck.val = human.result.face[0].real || 0;
ok.antispoofCheck.status = ok.antispoofCheck.val >= options.minConfidence;
ok.livenessCheck.val = human.result.face[0].live || 0;
ok.livenessCheck.status = ok.livenessCheck.val >= options.minConfidence;
ok.faceSize.val = Math.min(human.result.face[0].box[2], human.result.face[0].box[3]);
ok.faceSize.status = ok.faceSize.val >= options.minSize;
ok.descriptor.val = human.result.face[0].embedding?.length || 0;
ok.descriptor.status = ok.descriptor.val > 0;
ok.age.val = human.result.face[0].age || 0;
ok.age.status = ok.age.val > 0;
ok.gender.val = human.result.face[0].genderScore || 0;
ok.gender.status = ok.gender.val >= options.minConfidence;
}
// run again
ok.elapsedMs = Math.trunc(human.now() - startTime);
ok.timeout.status = ok.elapsedMs.val <= options.maxTime;
drawValidationTests();
if (allOk() || !ok.timeout.status) { // all criteria met
dom.video.pause();
return human.result.face[0];
}
ok.elapsedMs.val = Math.trunc(human.now() - startTime);
return new Promise((resolve) => {
setTimeout(async () => {
await validationLoop(); // run validation loop until conditions are met
@ -198,12 +229,14 @@ async function deleteRecord() {
}
async function detectFace() {
dom.canvas.style.height = '';
dom.canvas.getContext('2d')?.clearRect(0, 0, options.minSize, options.minSize);
if (!current?.face?.tensor || !current?.face?.embedding) return false;
console.log('face record:', current.face); // eslint-disable-line no-console
log(`detected face: ${current.face.gender} ${current.face.age || 0}y distance ${current.face.iris || 0}cm/${Math.round(100 * (current.face.iris || 0) / 2.54) / 100}in`);
human.tf.browser.toPixels(current.face.tensor as unknown as H.TensorLike, dom.canvas);
if (await indexDb.count() === 0) {
log('face database is empty');
log('face database is empty: nothing to compare face with');
document.body.style.background = 'black';
dom.delete.style.display = 'none';
return false;
@ -223,17 +256,20 @@ async function detectFace() {
}
async function main() { // main entry point
ok.faceCount = false;
ok.faceConfidence = false;
ok.facingCenter = false;
ok.blinkDetected = false;
ok.faceSize = false;
ok.antispoofCheck = false;
ok.livenessCheck = false;
ok.elapsedMs = 0;
ok.faceCount.status = false;
ok.faceConfidence.status = false;
ok.facingCenter.status = false;
ok.blinkDetected.status = false;
ok.faceSize.status = false;
ok.antispoofCheck.status = false;
ok.livenessCheck.status = false;
ok.age.status = false;
ok.gender.status = false;
ok.elapsedMs.val = 0;
dom.match.style.display = 'none';
dom.retry.style.display = 'none';
dom.source.style.display = 'none';
dom.canvas.style.height = '50%';
document.body.style.background = 'black';
await webCam();
await detectionLoop(); // start detection loop
@ -257,13 +293,15 @@ async function main() { // main entry point
async function init() {
log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);
log('face embedding model:', humanConfig.face.description.enabled ? 'faceres' : '', humanConfig.face['mobilefacenet']?.enabled ? 'mobilefacenet' : '', humanConfig.face['insightface']?.enabled ? 'insightface' : '');
log('options:', JSON.stringify(options).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ' '));
printFPS('loading...');
log('known face records:', await indexDb.count());
log('initializing webcam...');
await webCam(); // start webcam
log('loading human models...');
await human.load(); // preload all models
printFPS('initializing...');
log('initializing human...');
log('face embedding model:', humanConfig.face.description.enabled ? 'faceres' : '', humanConfig.face['mobilefacenet']?.enabled ? 'mobilefacenet' : '', humanConfig.face['insightface']?.enabled ? 'insightface' : '');
log('loading face database...');
log('known face records:', await indexDb.count());
dom.retry.addEventListener('click', main);
dom.save.addEventListener('click', saveRecords);
dom.delete.addEventListener('click', deleteRecord);

View File

@ -81,7 +81,7 @@
"@tensorflow/tfjs-node": "^3.20.0",
"@tensorflow/tfjs-node-gpu": "^3.20.0",
"@tensorflow/tfjs-tflite": "0.0.1-alpha.8",
"@types/node": "^18.7.20",
"@types/node": "^18.7.21",
"@types/offscreencanvas": "^2019.7.0",
"@typescript-eslint/eslint-plugin": "^5.38.0",
"@typescript-eslint/parser": "^5.38.0",

View File

@ -1,39 +1,39 @@
2022-09-24 11:38:12 DATA:  Build {"name":"@vladmandic/human","version":"2.10.3"}
2022-09-24 11:38:12 INFO:  Application: {"name":"@vladmandic/human","version":"2.10.3"}
2022-09-24 11:38:12 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
2022-09-24 11:38:12 INFO:  Toolchain: {"build":"0.7.13","esbuild":"0.15.9","typescript":"4.8.3","typedoc":"0.23.15","eslint":"8.24.0"}
2022-09-24 11:38:12 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
2022-09-24 11:38:12 STATE: Clean: {"locations":["dist/*","types/lib/*","typedoc/*"]}
2022-09-24 11:38:12 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":159,"outputBytes":608}
2022-09-24 11:38:12 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":75,"inputBytes":658536,"outputBytes":309564}
2022-09-24 11:38:12 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":167,"outputBytes":612}
2022-09-24 11:38:12 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":75,"inputBytes":658540,"outputBytes":309568}
2022-09-24 11:38:12 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":206,"outputBytes":664}
2022-09-24 11:38:12 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":75,"inputBytes":658592,"outputBytes":309618}
2022-09-24 11:38:12 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1125,"outputBytes":358}
2022-09-24 11:38:12 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1088,"outputBytes":583}
2022-09-24 11:38:12 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":75,"inputBytes":658511,"outputBytes":308423}
2022-09-24 11:38:12 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":11,"inputBytes":1344,"outputBytes":2821914}
2022-09-24 11:38:12 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":75,"inputBytes":3479842,"outputBytes":1688553}
2022-09-24 11:38:12 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":75,"inputBytes":3479842,"outputBytes":3110385}
2022-09-24 11:38:17 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":15}
2022-09-24 11:38:19 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":77,"generated":true}
2022-09-24 11:38:19 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6714,"outputBytes":3134}
2022-09-24 11:38:19 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":15488,"outputBytes":7788}
2022-09-24 11:38:29 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":110,"errors":0,"warnings":0}
2022-09-24 11:38:29 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
2022-09-24 11:38:29 STATE: Copy: {"input":"tfjs/tfjs.esm.d.ts"}
2022-09-24 11:38:29 INFO:  Done...
2022-09-24 11:38:30 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":193}
2022-09-24 11:38:30 STATE: Copy: {"input":"types/human.d.ts"}
2022-09-24 11:38:30 INFO:  Analyze models: {"folders":8,"result":"models/models.json"}
2022-09-24 11:38:30 STATE: Models {"folder":"./models","models":13}
2022-09-24 11:38:30 STATE: Models {"folder":"../human-models/models","models":42}
2022-09-24 11:38:30 STATE: Models {"folder":"../blazepose/model/","models":4}
2022-09-24 11:38:30 STATE: Models {"folder":"../anti-spoofing/model","models":1}
2022-09-24 11:38:30 STATE: Models {"folder":"../efficientpose/models","models":3}
2022-09-24 11:38:30 STATE: Models {"folder":"../insightface/models","models":5}
2022-09-24 11:38:30 STATE: Models {"folder":"../movenet/models","models":3}
2022-09-24 11:38:30 STATE: Models {"folder":"../nanodet/models","models":4}
2022-09-24 11:38:31 STATE: Models: {"count":57,"totalSize":383017442}
2022-09-24 11:38:31 INFO:  Human Build complete... {"logFile":"test/build.log"}
2022-09-25 10:14:55 DATA:  Build {"name":"@vladmandic/human","version":"2.10.3"}
2022-09-25 10:14:55 INFO:  Application: {"name":"@vladmandic/human","version":"2.10.3"}
2022-09-25 10:14:55 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
2022-09-25 10:14:55 INFO:  Toolchain: {"build":"0.7.13","esbuild":"0.15.9","typescript":"4.8.3","typedoc":"0.23.15","eslint":"8.24.0"}
2022-09-25 10:14:55 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
2022-09-25 10:14:55 STATE: Clean: {"locations":["dist/*","types/lib/*","typedoc/*"]}
2022-09-25 10:14:55 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":159,"outputBytes":608}
2022-09-25 10:14:55 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":75,"inputBytes":658536,"outputBytes":309564}
2022-09-25 10:14:55 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":167,"outputBytes":612}
2022-09-25 10:14:55 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":75,"inputBytes":658540,"outputBytes":309568}
2022-09-25 10:14:55 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":206,"outputBytes":664}
2022-09-25 10:14:55 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":75,"inputBytes":658592,"outputBytes":309618}
2022-09-25 10:14:55 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1125,"outputBytes":358}
2022-09-25 10:14:55 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1088,"outputBytes":583}
2022-09-25 10:14:55 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":75,"inputBytes":658511,"outputBytes":308423}
2022-09-25 10:14:55 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":11,"inputBytes":1344,"outputBytes":2821914}
2022-09-25 10:14:55 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":75,"inputBytes":3479842,"outputBytes":1688553}
2022-09-25 10:14:56 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":75,"inputBytes":3479842,"outputBytes":3110385}
2022-09-25 10:15:00 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":15}
2022-09-25 10:15:02 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":77,"generated":true}
2022-09-25 10:15:02 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6714,"outputBytes":3134}
2022-09-25 10:15:02 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17155,"outputBytes":9175}
2022-09-25 10:15:13 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":110,"errors":0,"warnings":0}
2022-09-25 10:15:14 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
2022-09-25 10:15:14 STATE: Copy: {"input":"tfjs/tfjs.esm.d.ts"}
2022-09-25 10:15:14 INFO:  Done...
2022-09-25 10:15:14 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":193}
2022-09-25 10:15:14 STATE: Copy: {"input":"types/human.d.ts"}
2022-09-25 10:15:14 INFO:  Analyze models: {"folders":8,"result":"models/models.json"}
2022-09-25 10:15:14 STATE: Models {"folder":"./models","models":13}
2022-09-25 10:15:14 STATE: Models {"folder":"../human-models/models","models":42}
2022-09-25 10:15:14 STATE: Models {"folder":"../blazepose/model/","models":4}
2022-09-25 10:15:14 STATE: Models {"folder":"../anti-spoofing/model","models":1}
2022-09-25 10:15:14 STATE: Models {"folder":"../efficientpose/models","models":3}
2022-09-25 10:15:14 STATE: Models {"folder":"../insightface/models","models":5}
2022-09-25 10:15:14 STATE: Models {"folder":"../movenet/models","models":3}
2022-09-25 10:15:14 STATE: Models {"folder":"../nanodet/models","models":4}
2022-09-25 10:15:15 STATE: Models: {"count":57,"totalSize":383017442}
2022-09-25 10:15:15 INFO:  Human Build complete... {"logFile":"test/build.log"}

2
wiki

@ -1 +1 @@
Subproject commit cf9ea4929d720dcb4e1b25a6b7c1fb4c5b4d2718
Subproject commit c90beadaf77a71df5c25c08d878ad2b6913b15dd