diff --git a/.build.json b/.build.json
index 344ce9b0..cffdaffb 100644
--- a/.build.json
+++ b/.build.json
@@ -143,13 +143,21 @@
"typedoc": "typedoc"
},
{
- "name": "demo/browser",
+ "name": "demo/typescript",
"platform": "browser",
"format": "esm",
"input": "demo/typescript/index.ts",
"output": "demo/typescript/index.js",
"sourcemap": true,
- "minify": false,
+ "external": ["*/human.esm.js"]
+ },
+ {
+ "name": "demo/facerecognition",
+ "platform": "browser",
+ "format": "esm",
+ "input": "demo/facerecognition/index.ts",
+ "output": "demo/facerecognition/index.js",
+ "sourcemap": true,
"external": ["*/human.esm.js"]
}
]
diff --git a/.eslintrc.json b/.eslintrc.json
index 3f753a3b..e125a983 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -29,6 +29,7 @@
"assets",
"demo/helpers/*.js",
"demo/typescript/*.js",
+ "demo/facerecognition/*.js",
"dist",
"media",
"models",
@@ -60,6 +61,7 @@
"no-bitwise": "off",
"no-case-declarations":"off",
"no-continue": "off",
+ "no-else-return": "off",
"no-lonely-if": "off",
"no-loop-func": "off",
"no-mixed-operators": "off",
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 90fc5bf1..273e6816 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,9 +11,8 @@
### **HEAD -> main** 2021/11/08 mandic00@live.com
-
-### **origin/main** 2021/11/08 mandic00@live.com
-
+- add type defs when working with relative path imports
+- disable humangl backend if webgl 1.0 is detected
### **release: 2.5.1** 2021/11/08 mandic00@live.com
diff --git a/demo/facerecognition/index.html b/demo/facerecognition/index.html
new file mode 100644
index 00000000..907fa811
--- /dev/null
+++ b/demo/facerecognition/index.html
@@ -0,0 +1,15 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8">
+    <title>Human: Face Recognition</title>
+    <script src="./index.js" type="module"></script>
+  </head>
+  <body>
+    <video id="video" playsinline></video>
+    <canvas id="canvas"></canvas>
+    <pre id="log"></pre>
+    <pre id="fps"></pre>
+    <pre id="status"></pre>
+  </body>
+</html>
diff --git a/demo/facerecognition/index.js b/demo/facerecognition/index.js
new file mode 100644
index 00000000..844f58c0
--- /dev/null
+++ b/demo/facerecognition/index.js
@@ -0,0 +1,166 @@
+/*
+ Human
+  homepage: <https://github.com/vladmandic/human>
+  author: <https://github.com/vladmandic>'
+*/
+
+// demo/facerecognition/index.ts
+import { Human } from "../../dist/human.esm.js";
+var humanConfig = {
+ modelBasePath: "../../models",
+ filter: { equalization: true },
+ face: {
+ enabled: true,
+ detector: { rotation: true, return: true },
+ description: { enabled: true },
+ iris: { enabled: true },
+ emotion: { enabled: false },
+ antispoof: { enabled: true }
+ },
+ body: { enabled: false },
+ hand: { enabled: false },
+ object: { enabled: false },
+ gesture: { enabled: true }
+};
+var options = {
+ minConfidence: 0.6,
+ minSize: 224,
+ maxTime: 1e4
+};
+var human = new Human(humanConfig);
+human.env["perfadd"] = false;
+human.draw.options.font = 'small-caps 18px "Lato"';
+human.draw.options.lineHeight = 20;
+var dom = {
+ video: document.getElementById("video"),
+ canvas: document.getElementById("canvas"),
+ log: document.getElementById("log"),
+ fps: document.getElementById("fps"),
+ status: document.getElementById("status")
+};
+var timestamp = { detect: 0, draw: 0 };
+var fps = { detect: 0, draw: 0 };
+var startTime = 0;
+var log = (...msg) => {
+ dom.log.innerText += msg.join(" ") + "\n";
+ console.log(...msg);
+};
+var printFPS = (msg) => dom.fps.innerText = msg;
+var printStatus = (msg) => dom.status.innerText = "status: " + JSON.stringify(msg).replace(/"|{|}/g, "").replace(/,/g, " | ");
+async function webCam() {
+ printFPS("starting webcam...");
+ const cameraOptions = { audio: false, video: { facingMode: "user", resizeMode: "none", width: { ideal: document.body.clientWidth } } };
+ const stream = await navigator.mediaDevices.getUserMedia(cameraOptions);
+ const ready = new Promise((resolve) => {
+ dom.video.onloadeddata = () => resolve(true);
+ });
+ dom.video.srcObject = stream;
+ dom.video.play();
+ await ready;
+ dom.canvas.width = dom.video.videoWidth;
+ dom.canvas.height = dom.video.videoHeight;
+ const track = stream.getVideoTracks()[0];
+ const capabilities = track.getCapabilities ? track.getCapabilities() : "";
+ const settings = track.getSettings ? track.getSettings() : "";
+ const constraints = track.getConstraints ? track.getConstraints() : "";
+ log("video:", dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });
+ dom.canvas.onclick = () => {
+ if (dom.video.paused)
+ dom.video.play();
+ else
+ dom.video.pause();
+ };
+}
+async function detectionLoop() {
+ if (!dom.video.paused) {
+ await human.detect(dom.video);
+ const now = human.now();
+ fps.detect = 1e3 / (now - timestamp.detect);
+ timestamp.detect = now;
+ requestAnimationFrame(detectionLoop);
+ }
+}
+var ok = {
+ faceCount: false,
+ faceConfidence: false,
+ facingCenter: false,
+ eyesOpen: false,
+ blinkDetected: false,
+ faceSize: false,
+ antispoofCheck: false,
+ livenessCheck: false,
+ elapsedMs: 0
+};
+var allOk = () => ok.faceCount && ok.faceSize && ok.blinkDetected && ok.facingCenter && ok.faceConfidence && ok.antispoofCheck;
+async function validationLoop() {
+ const interpolated = await human.next(human.result);
+ await human.draw.canvas(dom.video, dom.canvas);
+ await human.draw.all(dom.canvas, interpolated);
+ const now = human.now();
+ fps.draw = 1e3 / (now - timestamp.draw);
+ timestamp.draw = now;
+ printFPS(`fps: ${fps.detect.toFixed(1).padStart(5, " ")} detect | ${fps.draw.toFixed(1).padStart(5, " ")} draw`);
+ const gestures = Object.values(human.result.gesture).map((gesture) => gesture.gesture);
+ ok.faceCount = human.result.face.length === 1;
+ ok.eyesOpen = ok.eyesOpen || !(gestures.includes("blink left eye") || gestures.includes("blink right eye"));
+ ok.blinkDetected = ok.eyesOpen && ok.blinkDetected || gestures.includes("blink left eye") || gestures.includes("blink right eye");
+ ok.facingCenter = gestures.includes("facing center") && gestures.includes("looking center");
+ ok.faceConfidence = (human.result.face[0].boxScore || 0) > options.minConfidence && (human.result.face[0].faceScore || 0) > options.minConfidence && (human.result.face[0].genderScore || 0) > options.minConfidence;
+ ok.antispoofCheck = (human.result.face[0].real || 0) > options.minConfidence;
+ ok.faceSize = human.result.face[0].box[2] >= options.minSize && human.result.face[0].box[3] >= options.minSize;
+ printStatus(ok);
+ if (allOk()) {
+ dom.video.pause();
+ return human.result.face;
+ } else {
+ human.tf.dispose(human.result.face[0].tensor);
+ }
+ if (ok.elapsedMs > options.maxTime) {
+ dom.video.pause();
+ return human.result.face;
+ } else {
+ ok.elapsedMs = Math.trunc(human.now() - startTime);
+ return new Promise((resolve) => {
+ setTimeout(async () => {
+ const res = await validationLoop();
+ if (res)
+ resolve(human.result.face);
+ }, 30);
+ });
+ }
+}
+async function detectFace(face) {
+ dom.canvas.width = face.tensor.shape[2];
+ dom.canvas.height = face.tensor.shape[1];
+ dom.canvas.style.width = "";
+ human.tf.browser.toPixels(face.tensor, dom.canvas);
+ human.tf.dispose(face.tensor);
+}
+async function main() {
+ log("human version:", human.version, "| tfjs version:", human.tf.version_core);
+ printFPS("loading...");
+ await human.load();
+ printFPS("initializing...");
+ await human.warmup();
+ await webCam();
+ await detectionLoop();
+ startTime = human.now();
+ const face = await validationLoop();
+ if (!allOk())
+ log("did not find valid input", face);
+ else {
+ log("found valid face", face);
+ await detectFace(face[0]);
+ }
+ dom.fps.style.display = "none";
+}
+window.onload = main;
+/**
+ * Human demo for browsers
+ * @default Human Library
+ * @summary <https://github.com/vladmandic/human>
+ * @author <https://github.com/vladmandic>
+ * @copyright <https://github.com/vladmandic>
+ * @license MIT
+ */
+//# sourceMappingURL=index.js.map
diff --git a/demo/facerecognition/index.js.map b/demo/facerecognition/index.js.map
new file mode 100644
index 00000000..c479cc1e
--- /dev/null
+++ b/demo/facerecognition/index.js.map
@@ -0,0 +1,7 @@
+{
+ "version": 3,
+ "sources": ["index.ts"],
+ "sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary \n * @author \n * @copyright \n * @license MIT\n */\n\nimport { Human } from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human\n\nconst humanConfig = { // user configuration for human, used to fine-tune behavior\n modelBasePath: '../../models',\n filter: { equalization: true }, // lets run with histogram equilizer\n face: {\n enabled: true,\n detector: { rotation: true, return: true }, // return tensor is not really needed except to draw detected face\n description: { enabled: true },\n iris: { enabled: true }, // needed to determine gaze direction\n emotion: { enabled: false }, // not needed\n antispoof: { enabled: true }, // enable optional antispoof as well\n },\n body: { enabled: false },\n hand: { enabled: false },\n object: { enabled: false },\n gesture: { enabled: true },\n};\n\nconst options = {\n minConfidence: 0.6, // overal face confidence for box, face, gender, real\n minSize: 224, // min input to face descriptor model before degradation\n maxTime: 10000, // max time before giving up\n};\n\nconst human = new Human(humanConfig); // create instance of human with overrides from user configuration\n\nhuman.env['perfadd'] = false; // is performance data showing instant or total values\nhuman.draw.options.font = 'small-caps 18px \"Lato\"'; // set font used to draw labels when using draw methods\nhuman.draw.options.lineHeight = 20;\n\nconst dom = { // grab instances of dom objects so we dont have to look them up later\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('fps') as HTMLPreElement,\n status: document.getElementById('status') as HTMLPreElement,\n};\nconst timestamp = { detect: 0, draw: 0 }; // holds information used to calculate performance and possible memory leaks\nconst fps = { detect: 0, draw: 0 }; // holds calculated fps information for both detect and screen refresh\nlet startTime = 0;\n\nconst log = (...msg) => { // helper method to output messages\n dom.log.innerText += msg.join(' ') + '\\n';\n // eslint-disable-next-line no-console\n console.log(...msg);\n};\nconst printFPS = (msg) => dom.fps.innerText = msg; // print status element\nconst printStatus = (msg) => dom.status.innerText = 'status: ' + JSON.stringify(msg).replace(/\"|{|}/g, '').replace(/,/g, ' | '); // print status element\n\nasync function webCam() { // initialize webcam\n printFPS('starting webcam...');\n // @ts-ignore resizeMode is not yet defined in tslib\n const cameraOptions: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } };\n const stream: MediaStream = await navigator.mediaDevices.getUserMedia(cameraOptions);\n const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });\n dom.video.srcObject = stream;\n dom.video.play();\n await ready;\n dom.canvas.width = dom.video.videoWidth;\n dom.canvas.height = dom.video.videoHeight;\n const track: MediaStreamTrack = stream.getVideoTracks()[0];\n const capabilities: MediaTrackCapabilities | string = track.getCapabilities ? track.getCapabilities() : '';\n const settings: MediaTrackSettings | string = track.getSettings ? track.getSettings() : '';\n const constraints: MediaTrackConstraints | string = track.getConstraints ? 
track.getConstraints() : '';\n log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });\n dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click\n if (dom.video.paused) dom.video.play();\n else dom.video.pause();\n };\n}\n\nasync function detectionLoop() { // main detection loop\n if (!dom.video.paused) {\n await human.detect(dom.video); // actual detection; were not capturing output in a local variable as it can also be reached via human.result\n const now = human.now();\n fps.detect = 1000 / (now - timestamp.detect);\n timestamp.detect = now;\n requestAnimationFrame(detectionLoop); // start new frame immediately\n }\n}\n\nconst ok = { // must meet all rules\n faceCount: false,\n faceConfidence: false,\n facingCenter: false,\n eyesOpen: false,\n blinkDetected: false,\n faceSize: false,\n antispoofCheck: false,\n livenessCheck: false,\n elapsedMs: 0,\n};\nconst allOk = () => ok.faceCount && ok.faceSize && ok.blinkDetected && ok.facingCenter && ok.faceConfidence && ok.antispoofCheck;\n\nasync function validationLoop(): Promise { // main screen refresh loop\n const interpolated = await human.next(human.result); // smoothen result using last-known results\n await human.draw.canvas(dom.video, dom.canvas); // draw canvas to screen\n await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.\n const now = human.now();\n fps.draw = 1000 / (now - timestamp.draw);\n timestamp.draw = now;\n printFPS(`fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect | ${fps.draw.toFixed(1).padStart(5, ' ')} draw`); // write status\n\n const gestures: string[] = Object.values(human.result.gesture).map((gesture) => gesture.gesture); // flatten all gestures\n ok.faceCount = human.result.face.length === 1; // must be exactly detected face\n ok.eyesOpen = ok.eyesOpen || !(gestures.includes('blink left eye') || gestures.includes('blink right eye')); // blink validation is only ok once both eyes are open\n ok.blinkDetected = ok.eyesOpen && ok.blinkDetected || gestures.includes('blink left eye') || gestures.includes('blink right eye'); // need to detect blink only once\n ok.facingCenter = gestures.includes('facing center') && gestures.includes('looking center'); // must face camera and look at camera\n ok.faceConfidence = (human.result.face[0].boxScore || 0) > options.minConfidence && (human.result.face[0].faceScore || 0) > options.minConfidence && (human.result.face[0].genderScore || 0) > options.minConfidence;\n ok.antispoofCheck = (human.result.face[0].real || 0) > options.minConfidence;\n ok.faceSize = human.result.face[0].box[2] >= options.minSize && human.result.face[0].box[3] >= options.minSize;\n\n printStatus(ok);\n\n if (allOk()) { // all criteria met\n dom.video.pause();\n return human.result.face;\n } else {\n human.tf.dispose(human.result.face[0].tensor); // results are not ok, so lets dispose tensor\n }\n if (ok.elapsedMs > options.maxTime) { // give up\n dom.video.pause();\n return human.result.face;\n } else { // run again\n ok.elapsedMs = Math.trunc(human.now() - startTime);\n return new Promise((resolve) => {\n setTimeout(async () => {\n const res = await validationLoop(); // run validation loop until conditions are met\n if (res) resolve(human.result.face); // recursive promise resolve\n }, 30); // use to slow down refresh from max refresh rate to target of 30 fps\n });\n }\n}\n\nasync function detectFace(face) {\n // draw face and dispose face tensor immediatey afterwards\n 
dom.canvas.width = face.tensor.shape[2];\n dom.canvas.height = face.tensor.shape[1];\n dom.canvas.style.width = '';\n human.tf.browser.toPixels(face.tensor, dom.canvas);\n human.tf.dispose(face.tensor);\n\n // run detection using human.match and use face.embedding as input descriptor\n // tbd\n}\n\nasync function main() { // main entry point\n log('human version:', human.version, '| tfjs version:', human.tf.version_core);\n printFPS('loading...');\n await human.load(); // preload all models\n printFPS('initializing...');\n await human.warmup(); // warmup function to initialize backend for future faster detection\n await webCam(); // start webcam\n await detectionLoop(); // start detection loop\n startTime = human.now();\n const face = await validationLoop(); // start validation loop\n if (!allOk()) log('did not find valid input', face);\n else {\n log('found valid face', face);\n await detectFace(face[0]);\n }\n dom.fps.style.display = 'none';\n}\n\nwindow.onload = main;\n"],
+ "mappings": ";;;;;;;AASA;AATA,AAWA,IAAM,cAAc;AAAA,EAClB,eAAe;AAAA,EACf,QAAQ,EAAE,cAAc;AAAA,EACxB,MAAM;AAAA,IACJ,SAAS;AAAA,IACT,UAAU,EAAE,UAAU,MAAM,QAAQ;AAAA,IACpC,aAAa,EAAE,SAAS;AAAA,IACxB,MAAM,EAAE,SAAS;AAAA,IACjB,SAAS,EAAE,SAAS;AAAA,IACpB,WAAW,EAAE,SAAS;AAAA;AAAA,EAExB,MAAM,EAAE,SAAS;AAAA,EACjB,MAAM,EAAE,SAAS;AAAA,EACjB,QAAQ,EAAE,SAAS;AAAA,EACnB,SAAS,EAAE,SAAS;AAAA;AAGtB,IAAM,UAAU;AAAA,EACd,eAAe;AAAA,EACf,SAAS;AAAA,EACT,SAAS;AAAA;AAGX,IAAM,QAAQ,IAAI,MAAM;AAExB,MAAM,IAAI,aAAa;AACvB,MAAM,KAAK,QAAQ,OAAO;AAC1B,MAAM,KAAK,QAAQ,aAAa;AAEhC,IAAM,MAAM;AAAA,EACV,OAAO,SAAS,eAAe;AAAA,EAC/B,QAAQ,SAAS,eAAe;AAAA,EAChC,KAAK,SAAS,eAAe;AAAA,EAC7B,KAAK,SAAS,eAAe;AAAA,EAC7B,QAAQ,SAAS,eAAe;AAAA;AAElC,IAAM,YAAY,EAAE,QAAQ,GAAG,MAAM;AACrC,IAAM,MAAM,EAAE,QAAQ,GAAG,MAAM;AAC/B,IAAI,YAAY;AAEhB,IAAM,MAAM,IAAI,QAAQ;AACtB,MAAI,IAAI,aAAa,IAAI,KAAK,OAAO;AAErC,UAAQ,IAAI,GAAG;AAAA;AAEjB,IAAM,WAAW,CAAC,QAAQ,IAAI,IAAI,YAAY;AAC9C,IAAM,cAAc,CAAC,QAAQ,IAAI,OAAO,YAAY,aAAa,KAAK,UAAU,KAAK,QAAQ,UAAU,IAAI,QAAQ,MAAM;AAEzH,wBAAwB;AACtB,WAAS;AAET,QAAM,gBAAwC,EAAE,OAAO,OAAO,OAAO,EAAE,YAAY,QAAQ,YAAY,QAAQ,OAAO,EAAE,OAAO,SAAS,KAAK;AAC7I,QAAM,SAAsB,MAAM,UAAU,aAAa,aAAa;AACtE,QAAM,QAAQ,IAAI,QAAQ,CAAC,YAAY;AAAE,QAAI,MAAM,eAAe,MAAM,QAAQ;AAAA;AAChF,MAAI,MAAM,YAAY;AACtB,MAAI,MAAM;AACV,QAAM;AACN,MAAI,OAAO,QAAQ,IAAI,MAAM;AAC7B,MAAI,OAAO,SAAS,IAAI,MAAM;AAC9B,QAAM,QAA0B,OAAO,iBAAiB;AACxD,QAAM,eAAgD,MAAM,kBAAkB,MAAM,oBAAoB;AACxG,QAAM,WAAwC,MAAM,cAAc,MAAM,gBAAgB;AACxF,QAAM,cAA8C,MAAM,iBAAiB,MAAM,mBAAmB;AACpG,MAAI,UAAU,IAAI,MAAM,YAAY,IAAI,MAAM,aAAa,MAAM,OAAO,EAAE,QAAQ,OAAO,UAAU,aAAa;AAChH,MAAI,OAAO,UAAU,MAAM;AACzB,QAAI,IAAI,MAAM;AAAQ,UAAI,MAAM;AAAA;AAC3B,UAAI,MAAM;AAAA;AAAA;AAInB,+BAA+B;AAC7B,MAAI,CAAC,IAAI,MAAM,QAAQ;AACrB,UAAM,MAAM,OAAO,IAAI;AACvB,UAAM,MAAM,MAAM;AAClB,QAAI,SAAS,MAAQ,OAAM,UAAU;AACrC,cAAU,SAAS;AACnB,0BAAsB;AAAA;AAAA;AAI1B,IAAM,KAAK;AAAA,EACT,WAAW;AAAA,EACX,gBAAgB;AAAA,EAChB,cAAc;AAAA,EACd,UAAU;AAAA,EACV,eAAe;AAAA,EACf,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,eAAe;AAAA,EACf,WAAW;AAAA;AAEb,IAAM,QAAQ,MAAM,GAAG,aAAa,GAAG,YAAY,GAAG,iBAAiB,GAAG,gBAAgB,GAAG,kBAAkB,GAAG;AAElH,gCAAmE;AACjE,QAAM,eAAe,MAAM,MAAM,KAAK,MAAM;AAC5C,QAAM,MAAM,KAAK,OAAO,IAAI,OAAO,IAAI;AACvC,QAAM,MAAM,KAAK,IAAI,IAAI,QAAQ;AACjC,QAAM,MAAM,MAAM;AAClB,MAAI,OAAO,MAAQ,OAAM,UAAU;AACnC,YAAU,OAAO;AACjB,WAAS,QAAQ,IAAI,OAAO,QAAQ,GAAG,SAAS,GAAG,iBAAiB,IAAI,KAAK,QAAQ,GAAG,SAAS,GAAG;AAEpG,QAAM,WAAqB,OAAO,OAAO,MAAM,OAAO,SAAS,IAAI,CAAC,YAAY,QAAQ;AACxF,KAAG,YAAY,MAAM,OAAO,KAAK,WAAW;AAC5C,KAAG,WAAW,GAAG,YAAY,CAAE,UAAS,SAAS,qBAAqB,SAAS,SAAS;AACxF,KAAG,gBAAgB,GAAG,YAAY,GAAG,iBAAiB,SAAS,SAAS,qBAAqB,SAAS,SAAS;AAC/G,KAAG,eAAe,SAAS,SAAS,oBAAoB,SAAS,SAAS;AAC1E,KAAG,iBAAkB,OAAM,OAAO,KAAK,GAAG,YAAY,KAAK,QAAQ,iBAAkB,OAAM,OAAO,KAAK,GAAG,aAAa,KAAK,QAAQ,iBAAkB,OAAM,OAAO,KAAK,GAAG,eAAe,KAAK,QAAQ;AACvM,KAAG,iBAAkB,OAAM,OAAO,KAAK,GAAG,QAAQ,KAAK,QAAQ;AAC/D,KAAG,WAAW,MAAM,OAAO,KAAK,GAAG,IAAI,MAAM,QAAQ,WAAW,MAAM,OAAO,KAAK,GAAG,IAAI,MAAM,QAAQ;AAEvG,cAAY;AAEZ,MAAI,SAAS;AACX,QAAI,MAAM;AACV,WAAO,MAAM,OAAO;AAAA,SACf;AACL,UAAM,GAAG,QAAQ,MAAM,OAAO,KAAK,GAAG;AAAA;AAExC,MAAI,GAAG,YAAY,QAAQ,SAAS;AAClC,QAAI,MAAM;AACV,WAAO,MAAM,OAAO;AAAA,SACf;AACL,OAAG,YAAY,KAAK,MAAM,MAAM,QAAQ;AACxC,WAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,iBAAW,YAAY;AACrB,cAAM,MAAM,MAAM;AAClB,YAAI;AAAK,kBAAQ,MAAM,OAAO;AAAA,SAC7B;AAAA;AAAA;AAAA;AAKT,0BAA0B,MAAM;AAE9B,MAAI,OAAO,QAAQ,KAAK,OAAO,MAAM;AACrC,MAAI,OAAO,SAAS,KAAK,OAAO,MAAM;AACtC,MAAI,OAAO,MAAM,QAAQ;AACzB,QAAM,GAAG,QAAQ,SAAS,KAAK,QAAQ,IAAI;AAC3C,QAAM,GAAG,QAAQ,KAAK;AAAA;AAMxB,sBAAsB;AACpB,MAAI,kBAAkB,MAAM,SAAS,mBAAmB,MAAM,GAAG;AACjE,WAAS;AACT,QAAM,MAAM;AACZ,WAAS;AACT,QAAM,MAAM;AACZ,QAAM;AACN,QAAM;AACN,cAAY,MAAM;A
AClB,QAAM,OAAO,MAAM;AACnB,MAAI,CAAC;AAAS,QAAI,4BAA4B;AAAA,OACzC;AACH,QAAI,oBAAoB;AACxB,UAAM,WAAW,KAAK;AAAA;AAExB,MAAI,IAAI,MAAM,UAAU;AAAA;AAG1B,OAAO,SAAS;",
+ "names": []
+}
diff --git a/demo/facerecognition/index.ts b/demo/facerecognition/index.ts
new file mode 100644
index 00000000..e8865c47
--- /dev/null
+++ b/demo/facerecognition/index.ts
@@ -0,0 +1,180 @@
+/**
+ * Human demo for browsers
+ * @default Human Library
+ * @summary <https://github.com/vladmandic/human>
+ * @author <https://github.com/vladmandic>
+ * @copyright <https://github.com/vladmandic>
+ * @license MIT
+ */
+
+import { Human } from '../../dist/human.esm.js'; // equivalent of @vladmandic/human
+
+const humanConfig = { // user configuration for human, used to fine-tune behavior
+ modelBasePath: '../../models',
+ filter: { equalization: true }, // let's run with histogram equalizer
+ face: {
+ enabled: true,
+ detector: { rotation: true, return: true }, // return tensor is not really needed except to draw detected face
+ description: { enabled: true },
+ iris: { enabled: true }, // needed to determine gaze direction
+ emotion: { enabled: false }, // not needed
+ antispoof: { enabled: true }, // enable optional antispoof as well
+ },
+ body: { enabled: false },
+ hand: { enabled: false },
+ object: { enabled: false },
+ gesture: { enabled: true },
+};
+
+const options = {
+ minConfidence: 0.6, // overall face confidence for box, face, gender, real
+ minSize: 224, // min input to face descriptor model before degradation
+ maxTime: 10000, // max time before giving up
+};
+
+const human = new Human(humanConfig); // create instance of human with overrides from user configuration
+
+human.env['perfadd'] = false; // is performance data showing instant or total values
+human.draw.options.font = 'small-caps 18px "Lato"'; // set font used to draw labels when using draw methods
+human.draw.options.lineHeight = 20;
+
+const dom = { // grab instances of dom objects so we don't have to look them up later
+ video: document.getElementById('video') as HTMLVideoElement,
+ canvas: document.getElementById('canvas') as HTMLCanvasElement,
+ log: document.getElementById('log') as HTMLPreElement,
+ fps: document.getElementById('fps') as HTMLPreElement,
+ status: document.getElementById('status') as HTMLPreElement,
+};
+const timestamp = { detect: 0, draw: 0 }; // holds information used to calculate performance and possible memory leaks
+const fps = { detect: 0, draw: 0 }; // holds calculated fps information for both detect and screen refresh
+let startTime = 0;
+
+const log = (...msg) => { // helper method to output messages
+ dom.log.innerText += msg.join(' ') + '\n';
+ // eslint-disable-next-line no-console
+ console.log(...msg);
+};
+const printFPS = (msg) => dom.fps.innerText = msg; // print fps element
+const printStatus = (msg) => dom.status.innerText = 'status: ' + JSON.stringify(msg).replace(/"|{|}/g, '').replace(/,/g, ' | '); // print status element
+
+async function webCam() { // initialize webcam
+ printFPS('starting webcam...');
+ // @ts-ignore resizeMode is not yet defined in tslib
+ const cameraOptions: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } };
+ const stream: MediaStream = await navigator.mediaDevices.getUserMedia(cameraOptions);
+ const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });
+ dom.video.srcObject = stream;
+ dom.video.play();
+ await ready;
+ dom.canvas.width = dom.video.videoWidth;
+ dom.canvas.height = dom.video.videoHeight;
+ const track: MediaStreamTrack = stream.getVideoTracks()[0];
+ const capabilities: MediaTrackCapabilities | string = track.getCapabilities ? track.getCapabilities() : '';
+ const settings: MediaTrackSettings | string = track.getSettings ? track.getSettings() : '';
+ const constraints: MediaTrackConstraints | string = track.getConstraints ? track.getConstraints() : '';
+ log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });
+ dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click
+ if (dom.video.paused) dom.video.play();
+ else dom.video.pause();
+ };
+}
+
+async function detectionLoop() { // main detection loop
+ if (!dom.video.paused) {
+ await human.detect(dom.video); // actual detection; we're not capturing output in a local variable as it can also be reached via human.result
+ const now = human.now();
+ fps.detect = 1000 / (now - timestamp.detect);
+ timestamp.detect = now;
+ requestAnimationFrame(detectionLoop); // start new frame immediately
+ }
+}
+
+const ok = { // must meet all rules
+ faceCount: false,
+ faceConfidence: false,
+ facingCenter: false,
+ eyesOpen: false,
+ blinkDetected: false,
+ faceSize: false,
+ antispoofCheck: false,
+ livenessCheck: false,
+ elapsedMs: 0,
+};
+const allOk = () => ok.faceCount && ok.faceSize && ok.blinkDetected && ok.facingCenter && ok.faceConfidence && ok.antispoofCheck;
+
+async function validationLoop(): Promise<typeof human.result.face> { // main screen refresh loop
+ const interpolated = await human.next(human.result); // smoothen result using last-known results
+ await human.draw.canvas(dom.video, dom.canvas); // draw canvas to screen
+ await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.
+ const now = human.now();
+ fps.draw = 1000 / (now - timestamp.draw);
+ timestamp.draw = now;
+ printFPS(`fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect | ${fps.draw.toFixed(1).padStart(5, ' ')} draw`); // write status
+
+ const gestures: string[] = Object.values(human.result.gesture).map((gesture) => gesture.gesture); // flatten all gestures
+ ok.faceCount = human.result.face.length === 1; // must be exactly one detected face
+ ok.eyesOpen = ok.eyesOpen || !(gestures.includes('blink left eye') || gestures.includes('blink right eye')); // blink validation is only ok once both eyes are open
+ ok.blinkDetected = ok.eyesOpen && ok.blinkDetected || gestures.includes('blink left eye') || gestures.includes('blink right eye'); // need to detect blink only once
+ ok.facingCenter = gestures.includes('facing center') && gestures.includes('looking center'); // must face camera and look at camera
+ ok.faceConfidence = (human.result.face[0].boxScore || 0) > options.minConfidence && (human.result.face[0].faceScore || 0) > options.minConfidence && (human.result.face[0].genderScore || 0) > options.minConfidence;
+ ok.antispoofCheck = (human.result.face[0].real || 0) > options.minConfidence;
+ ok.faceSize = human.result.face[0].box[2] >= options.minSize && human.result.face[0].box[3] >= options.minSize;
+
+ printStatus(ok);
+
+ if (allOk()) { // all criteria met
+ dom.video.pause();
+ return human.result.face;
+ } else {
+ human.tf.dispose(human.result.face[0].tensor); // results are not ok, so lets dispose tensor
+ }
+ if (ok.elapsedMs > options.maxTime) { // give up
+ dom.video.pause();
+ return human.result.face;
+ } else { // run again
+ ok.elapsedMs = Math.trunc(human.now() - startTime);
+ return new Promise((resolve) => {
+ setTimeout(async () => {
+ const res = await validationLoop(); // run validation loop until conditions are met
+ if (res) resolve(human.result.face); // recursive promise resolve
+ }, 30); // used to slow down refresh from max refresh rate to a target of 30 fps
+ });
+ }
+}
+
+async function detectFace(face) {
+ // draw face and dispose face tensor immediately afterwards
+ dom.canvas.width = face.tensor.shape[2];
+ dom.canvas.height = face.tensor.shape[1];
+ dom.canvas.style.width = '';
+ human.tf.browser.toPixels(face.tensor, dom.canvas);
+ human.tf.dispose(face.tensor);
+
+ // run detection using human.match and use face.embedding as input descriptor
+ // tbd
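+ // a minimal sketch of that matching step, left commented out; assumes `db` is a
+ // hypothetical array of descriptors saved from previously validated faces:
+ // const db: number[][] = []; // hypothetical database of known face descriptors
+ // const match = human.match(face.embedding, db); // best match as { index, similarity, distance }
+ // log('best match:', match.index, 'similarity:', match.similarity);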
+}
+
+async function main() { // main entry point
+ log('human version:', human.version, '| tfjs version:', human.tf.version_core);
+ printFPS('loading...');
+ await human.load(); // preload all models
+ printFPS('initializing...');
+ await human.warmup(); // warmup function to initialize backend for future faster detection
+ await webCam(); // start webcam
+ await detectionLoop(); // start detection loop
+ startTime = human.now();
+ const face = await validationLoop(); // start validation loop
+ if (!allOk()) log('did not find valid input', face);
+ else {
+ log('found valid face', face);
+ await detectFace(face[0]);
+ }
+ dom.fps.style.display = 'none';
+}
+
+window.onload = main;
diff --git a/demo/typescript/index.html b/demo/typescript/index.html
index 3b7d0ba4..a74a14bc 100644
--- a/demo/typescript/index.html
+++ b/demo/typescript/index.html
@@ -26,6 +26,5 @@
-