diff --git a/CHANGELOG.md b/CHANGELOG.md
index b96f28e9..c3e15193 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,8 +9,9 @@
## Changelog
-### **HEAD -> main** 2022/11/18 mandic00@live.com
+### **HEAD -> main** 2022/11/21 mandic00@live.com
+- enforce markdown linting
- cleanup git history
- default empty result
- refactor draw and models namespaces
diff --git a/TODO.md b/TODO.md
index f1ce8dab..39879e1d 100644
--- a/TODO.md
+++ b/TODO.md
@@ -51,9 +51,9 @@ No support for running in **web workers** as Safari still does not support `Offs
## Pending Release Changes
-Optimizations:
+Optimizations:
- Enabled high-resolution optimizations
- Internal limits are increased from **2k** to **4k**
+ Internal limits are increased from **2k** to **4k**
- Enhanced device capabilities detection
See `human.env.[agent, wasm, webgl, webgpu]` for details
- If `config.backend` is not set, Human will auto-select best backend
@@ -62,15 +62,16 @@ Optimizations:
- Reduce build dependencies
`Human` is now 30% smaller :)
As usual, `Human` has **zero** runtime dependencies,
- all *devDependencies* are only to rebuild `Human` itself
+ all *devDependencies* are only to rebuild `Human` itself
- Default hand skeleton model changed from `handlandmark-full` to `handlandmark-lite`
Both models are still supported, this reduces default size and increases performance
-Features:
+Features:
- Add [draw label templates](https://github.com/vladmandic/human/wiki/Draw)
- Add `config.filter.autoBrightness` (*enabled by default*)
Per-frame video on-the-fly brightness adjustments
Which significantly increases performance and precision in poorly lit scenes
+- Add new demo [face detect]((https://vladmandic.github.io/human/demo/facedetect/index.html))
- Improved `config.filter.equalization` (*disabled by default*)
Image and video on-demand histogram equalization
- Support selecting specific video source when multiple cameras are present
@@ -78,7 +79,7 @@ Features:
- Updated algorithm to determine distance from camera based on iris size
See `human.result.face[n].distance`
-Architecture:
+Architecture:
- Upgrade to **TFJS 4.1** with **strong typing**
see [notes](https://github.com/vladmandic/human#typedefs) on how to use
- `TypeDef` refactoring
@@ -87,14 +88,14 @@ Architecture:
- Repack external typedefs
Removes all external typedef dependencies
- Refactor namespace exports
- Better [TypeDoc specs](https://vladmandic.github.io/human/typedoc/index.html)
+ Better [TypeDoc specs](https://vladmandic.github.io/human/typedoc/index.html)
- Add named export for improved bundler support when using non-default imports
-- Cleanup Git history for `dist`/`typedef`/`types`
-- Cleanup `@vladmandic/human-models`
+- Cleanup Git history for `dist`/`typedef`/`types`
+- Cleanup `@vladmandic/human-models`
- Support for **NodeJS v19**
- Upgrade to **TypeScript 4.9**
-Breaking changes:
+Breaking changes:
- Replaced `result.face[n].iris` with `result.face[n].distance`
- Replaced `human.getModelStats()` with `human.models.stats()`
- Moved `human.similarity`, `human.distance` and `human.match` to namespace `human.match.*`
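+
+A minimal migration sketch (new call sites are inferred from the renames above and exact namespace members may differ; `emb1`, `emb2` and `result` are placeholders):
+
+```js
+const stats = human.models.stats();             // was: human.getModelStats()
+const sim = human.match.similarity(emb1, emb2); // was: human.similarity(emb1, emb2)
+const dist = result.face[0].distance;           // was: result.face[0].iris (now an estimated camera distance, not iris size)
+```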
diff --git a/assets/screenshot-facedetect.jpg b/assets/screenshot-facedetect.jpg
index c36342e8..26db86c2 100644
Binary files a/assets/screenshot-facedetect.jpg and b/assets/screenshot-facedetect.jpg differ
diff --git a/demo/facedetect/facedetect.js b/demo/facedetect/facedetect.js
index e3a8ac32..9d6d86a7 100644
--- a/demo/facedetect/facedetect.js
+++ b/demo/facedetect/facedetect.js
@@ -6,7 +6,8 @@
/** @type {Human} */
import { Human } from '../../dist/human.esm.js';
-import { showLoader, hideLoader } from './loader.js';
+
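+// module-scoped reference to the loader widget; assigned when the <component-loader> element connects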
+let loader;
const humanConfig = { // user configuration for human, used to fine-tune behavior
debug: true,
@@ -30,29 +31,83 @@ const humanConfig = { // user configuration for human, used to fine-tune behavio
const human = new Human(humanConfig); // new instance of human
+export const showLoader = (msg) => { loader.setAttribute('msg', msg); loader.style.display = 'block'; };
+export const hideLoader = () => { loader.style.display = 'none'; };
+
+class ComponentLoader extends HTMLElement { // watch for attributes
+ message = document.createElement('div');
+
+ static get observedAttributes() { return ['msg']; }
+
+ attributeChangedCallback(_name, _prevVal, currVal) {
+ this.message.innerHTML = currVal;
+ }
+
+ connectedCallback() { // triggered on insert
+ this.attachShadow({ mode: 'open' });
+ const css = document.createElement('style');
+ css.innerHTML = `
+ .loader-container { top: 450px; justify-content: center; position: fixed; width: 100%; }
+ .loader-message { font-size: 1.5rem; padding: 1rem; }
+ .loader { width: 300px; height: 300px; border: 3px solid transparent; border-radius: 50%; border-top: 4px solid #f15e41; animation: spin 4s linear infinite; position: relative; }
+ .loader::before, .loader::after { content: ""; position: absolute; top: 6px; bottom: 6px; left: 6px; right: 6px; border-radius: 50%; border: 4px solid transparent; }
+ .loader::before { border-top-color: #bad375; animation: 3s spin linear infinite; }
+ .loader::after { border-top-color: #26a9e0; animation: spin 1.5s linear infinite; }
+ @keyframes spin { from { transform: rotate(0deg); } to { transform: rotate(360deg); } }
+ `;
+ const container = document.createElement('div');
+ container.id = 'loader-container';
+ container.className = 'loader-container';
+ loader = document.createElement('div');
+ loader.id = 'loader';
+ loader.className = 'loader';
+ this.message.id = 'loader-message';
+ this.message.className = 'loader-message';
+ this.message.innerHTML = '';
+ container.appendChild(this.message);
+ container.appendChild(loader);
+ this.shadowRoot?.append(css, container);
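+    // reassign the module-scoped `loader` to the component itself (the spinner div created above stays a child),
+    // so showLoader()/hideLoader() can set the `msg` attribute and toggle visibility on the whole widget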
+ loader = this; // eslint-disable-line @typescript-eslint/no-this-alias
+ }
+}
+
+customElements.define('component-loader', ComponentLoader);
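+// the loader is registered as <component-loader>; the demo page is assumed to declare
+// <component-loader></component-loader> in its markup so that showLoader()/hideLoader() have a target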
+
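+// builds a thumbnail canvas for one detected face and attaches the analysis summary as a tooltip;
+// clicking the canvas copies that summary into the #description element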
+function addFace(face, source) {
+ const deg = (rad) => Math.round((rad || 0) * 180 / Math.PI);
+ const canvas = document.createElement('canvas');
+ const emotion = face.emotion?.map((e) => `${Math.round(100 * e.score)}% ${e.emotion}`) || [];
+ const rotation = `pitch ${deg(face.rotation?.angle.pitch)}° | roll ${deg(face.rotation?.angle.roll)}° | yaw ${deg(face.rotation?.angle.yaw)}°`;
+  const gaze = `direction ${deg(face.rotation?.gaze.bearing)}° strength ${Math.round(100 * (face.rotation?.gaze.strength || 0))}%`;
+ canvas.title = `
+ source: ${source}
+ score: ${Math.round(100 * face.boxScore)}% detection ${Math.round(100 * face.faceScore)}% analysis
+ age: ${face.age} years | gender: ${face.gender} score ${Math.round(100 * face.genderScore)}%
+ emotion: ${emotion.join(' | ')}
+ head rotation: ${rotation}
+ eyes gaze: ${gaze}
+ camera distance: ${face.distance}m | ${Math.round(100 * face.distance / 2.54)}in
+ check: ${Math.round(100 * face.real)}% real ${Math.round(100 * face.live)}% live
+ `.replace(/ /g, ' ');
+ canvas.onclick = (e) => {
+ e.preventDefault();
+ document.getElementById('description').innerHTML = canvas.title;
+ };
+ human.tf.browser.toPixels(face.tensor, canvas);
+ human.tf.dispose(face.tensor);
+ return canvas;
+}
+
async function addFaces(imgEl) {
showLoader('human: busy');
const faceEl = document.getElementById('faces');
faceEl.innerHTML = '';
const res = await human.detect(imgEl);
+ console.log(res); // eslint-disable-line no-console
+ document.getElementById('description').innerHTML = `detected ${res.face.length} faces`;
for (const face of res.face) {
- const canvas = document.createElement('canvas');
- const emotion = face.emotion?.map((e) => `${Math.round(100 * e.score)}% ${e.emotion}`) || [];
- canvas.title = `
- source: ${imgEl.src.substring(0, 64)}
- score: ${Math.round(100 * face.boxScore)}% detection ${Math.round(100 * face.faceScore)}% analysis
- age: ${face.age} years
- gender: ${face.gender} score ${Math.round(100 * face.genderScore)}%
- emotion: ${emotion.join(' | ')}
- check: ${Math.round(100 * face.real)}% real ${Math.round(100 * face.live)}% live
- `.replace(/ /g, ' ');
- canvas.onclick = (e) => {
- e.preventDefault();
- document.getElementById('description').innerHTML = canvas.title;
- };
- human.tf.browser.toPixels(face.tensor, canvas);
- human.tf.dispose(face.tensor);
- faceEl?.appendChild(canvas);
+ const canvas = addFace(face, imgEl.src.substring(0, 64));
+ faceEl.appendChild(canvas);
}
hideLoader();
}
diff --git a/demo/facedetect/index.html b/demo/facedetect/index.html
index 844ddec2..e7bc6e0e 100644
--- a/demo/facedetect/index.html
+++ b/demo/facedetect/index.html
@@ -16,6 +16,7 @@