diff --git a/.eslintrc.json b/.eslintrc.json
index 9e86aa34..f713f5c7 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -1,93 +1,146 @@
 {
   "globals": {},
-  "env": {
-    "browser": true,
-    "commonjs": true,
-    "node": true,
-    "es2021": true
-  },
-  "parser": "@typescript-eslint/parser",
-  "parserOptions": {
-    "ecmaVersion": 2021
-  },
-  "plugins": [
-    "@typescript-eslint",
-    "html"
-  ],
-  "extends": [
-    "airbnb-base",
-    "eslint:recommended",
-    "plugin:@typescript-eslint/eslint-recommended",
-    "plugin:@typescript-eslint/recommended",
-    "plugin:import/errors",
-    "plugin:import/warnings",
-    "plugin:json/recommended-with-comments",
-    "plugin:node/recommended",
-    "plugin:promise/recommended"
+  "overrides": [
+    {
+      "files": ["**/*.ts"],
+      "env": {
+        "browser": true,
+        "commonjs": false,
+        "node": false,
+        "es2021": true
+      },
+      "parser": "@typescript-eslint/parser",
+      "parserOptions": {
+        "ecmaVersion": "latest",
+        "project": ["./tsconfig.json"]
+      },
+      "plugins": [
+        "@typescript-eslint"
+      ],
+      "extends": [
+        "airbnb-base",
+        "eslint:recommended",
+        "plugin:@typescript-eslint/eslint-recommended",
+        "plugin:@typescript-eslint/recommended",
+        "plugin:import/recommended",
+        "plugin:promise/recommended"
+      ],
+      "rules": {
+        "@typescript-eslint/ban-ts-comment":"off",
+        "@typescript-eslint/no-empty-interface":"off",
+        "@typescript-eslint/no-inferrable-types":"off",
+        "dot-notation":"off",
+        "guard-for-in":"off",
+        "import/extensions": ["off", "always"],
+        "import/no-unresolved":"off",
+        "import/prefer-default-export":"off",
+        "lines-between-class-members":"off",
+        "max-len": [1, 275, 3],
+        "no-async-promise-executor":"off",
+        "no-await-in-loop":"off",
+        "no-bitwise":"off",
+        "no-continue":"off",
+        "no-lonely-if":"off",
+        "no-mixed-operators":"off",
+        "no-param-reassign":"off",
+        "no-plusplus":"off",
+        "no-regex-spaces":"off",
+        "no-restricted-syntax":"off",
+        "no-return-assign":"off",
+        "object-curly-newline":"off",
+        "prefer-destructuring":"off",
+        "prefer-template":"off",
+        "radix":"off"
+      }
+    },
+    {
+      "files": ["**/*.js"],
+      "env": {
+        "browser": true,
+        "commonjs": true,
+        "node": true,
+        "es2021": true
+      },
+      "parserOptions": {
+        "sourceType": "module",
+        "ecmaVersion": "latest"
+      },
+      "plugins": [
+      ],
+      "extends": [
+        "airbnb-base",
+        "eslint:recommended",
+        "plugin:node/recommended",
+        "plugin:promise/recommended"
+      ],
+      "rules": {
+        "dot-notation":"off",
+        "import/extensions": ["error", "always"],
+        "import/no-extraneous-dependencies":"off",
+        "max-len": [1, 275, 3],
+        "no-await-in-loop":"off",
+        "no-bitwise":"off",
+        "no-continue":"off",
+        "no-mixed-operators":"off",
+        "no-param-reassign":"off",
+        "no-plusplus":"off",
+        "no-regex-spaces":"off",
+        "no-restricted-syntax":"off",
+        "no-return-assign":"off",
+        "node/no-unsupported-features/es-syntax":"off",
+        "object-curly-newline":"off",
+        "prefer-destructuring":"off",
+        "prefer-template":"off",
+        "radix":"off"
+      }
+    },
+    {
+      "files": ["**/*.json"],
+      "env": {
+        "browser": false,
+        "commonjs": false,
+        "node": false,
+        "es2021": false
+      },
+      "parserOptions": {
+        "ecmaVersion": "latest"
+      },
+      "plugins": [
+        "json"
+      ],
+      "extends": [
+        "plugin:json/recommended"
+      ]
+    },
+    {
+      "files": ["**/*.html"],
+      "env": {
+        "browser": true,
+        "commonjs": false,
+        "node": false,
+        "es2021": false
+      },
+      "parserOptions": {
+        "sourceType": "module",
+        "ecmaVersion": "latest"
+      },
+      "parser": "@html-eslint/parser",
+      "extends": ["plugin:@html-eslint/recommended"],
+      "plugins": [
+        "html", "@html-eslint"
+      ],
+      "rules": {
+        "@html-eslint/indent": ["error", 2],
+        "@html-eslint/element-newline":"off"
+      }
+    }
   ],
   "ignorePatterns": [
+    "node_modules",
     "assets",
+    "dist",
     "demo/helpers/*.js",
     "demo/typescript/*.js",
-    "demo/faceid/*.js",
-    "dist",
-    "media",
-    "models",
-    "node_modules",
-    "types/human.d.ts"
-  ],
-  "rules": {
-    "@typescript-eslint/ban-ts-comment": "off",
-    "@typescript-eslint/explicit-module-boundary-types": "off",
-    "@typescript-eslint/no-shadow": "error",
-    "@typescript-eslint/no-var-requires": "off",
-    "@typescript-eslint/prefer-as-const": "off",
-    "@typescript-eslint/triple-slash-reference": "off",
-    "@typescript-eslint/no-inferrable-types": "off",
-    "@typescript-eslint/no-empty-interface": ["error", { "allowSingleExtends": true }],
-    "camelcase": "off",
-    "class-methods-use-this": "off",
-    "dot-notation": "off",
-    "func-names": "off",
-    "guard-for-in": "off",
-    "import/extensions": "off",
-    "import/named": "off",
-    "import/no-extraneous-dependencies": "off",
-    "import/no-named-as-default": "off",
-    "import/no-unresolved": "off",
-    "import/prefer-default-export": "off",
-    "lines-between-class-members": "off",
-    "max-len": [1, 275, 3],
-    "newline-per-chained-call": "off",
-    "no-async-promise-executor": "off",
-    "no-await-in-loop": "off",
-    "no-bitwise": "off",
-    "no-case-declarations":"off",
-    "no-continue": "off",
-    "no-else-return": "off",
-    "no-lonely-if": "off",
-    "no-loop-func": "off",
-    "no-mixed-operators": "off",
-    "no-param-reassign":"off",
-    "no-plusplus": "off",
-    "no-process-exit": "off",
-    "no-regex-spaces": "off",
-    "no-restricted-globals": "off",
-    "no-restricted-syntax": "off",
-    "no-return-assign": "off",
-    "no-shadow": "off",
-    "no-underscore-dangle": "off",
-    "node/no-missing-import": ["error", { "tryExtensions": [".js", ".json", ".ts"] }],
-    "node/no-unpublished-import": "off",
-    "node/no-unpublished-require": "off",
-    "node/no-unsupported-features/es-syntax": "off",
-    "node/shebang": "off",
-    "object-curly-newline": "off",
-    "prefer-destructuring": "off",
-    "prefer-template":"off",
-    "promise/always-return": "off",
-    "promise/catch-or-return": "off",
-    "promise/no-nesting": "off",
-    "radix": "off"
-  }
+    "demo/faceid/*.js"
+  ]
 }
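For readers unfamiliar with the pattern: ESLint applies each `overrides` entry only to files matching its `files` globs, so the rework above gives `*.ts`, `*.js`, `*.json`, and `*.html` sources their own parser, environment, and rule set, while top-level keys such as `ignorePatterns` still apply to every linted file. A minimal sketch of the same pattern, as a hypothetical `.eslintrc.js` (not part of this commit):

```ts
// Minimal sketch of per-filetype ESLint overrides (hypothetical .eslintrc.js, not this repo's config).
module.exports = {
  ignorePatterns: ['node_modules', 'dist'], // top-level keys apply to every linted file
  overrides: [
    {
      files: ['**/*.ts'], // this block applies only to TypeScript sources
      parser: '@typescript-eslint/parser',
      parserOptions: { project: ['./tsconfig.json'] }, // "project" enables type-aware rules
      plugins: ['@typescript-eslint'],
      extends: ['plugin:@typescript-eslint/recommended'],
    },
    {
      files: ['**/*.js'], // plain JavaScript keeps the default parser and Node globals
      env: { node: true, commonjs: true, es2021: true },
      extends: ['eslint:recommended'],
    },
  ],
};
```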
"@html-eslint/element-newline":"off" + } + } ], "ignorePatterns": [ + "node_modules", "assets", + "dist", "demo/helpers/*.js", "demo/typescript/*.js", - "demo/faceid/*.js", - "dist", - "media", - "models", - "node_modules", - "types/human.d.ts" - ], - "rules": { - "@typescript-eslint/ban-ts-comment": "off", - "@typescript-eslint/explicit-module-boundary-types": "off", - "@typescript-eslint/no-shadow": "error", - "@typescript-eslint/no-var-requires": "off", - "@typescript-eslint/prefer-as-const": "off", - "@typescript-eslint/triple-slash-reference": "off", - "@typescript-eslint/no-inferrable-types": "off", - "@typescript-eslint/no-empty-interface": ["error", { "allowSingleExtends": true }], - "camelcase": "off", - "class-methods-use-this": "off", - "dot-notation": "off", - "func-names": "off", - "guard-for-in": "off", - "import/extensions": "off", - "import/named": "off", - "import/no-extraneous-dependencies": "off", - "import/no-named-as-default": "off", - "import/no-unresolved": "off", - "import/prefer-default-export": "off", - "lines-between-class-members": "off", - "max-len": [1, 275, 3], - "newline-per-chained-call": "off", - "no-async-promise-executor": "off", - "no-await-in-loop": "off", - "no-bitwise": "off", - "no-case-declarations":"off", - "no-continue": "off", - "no-else-return": "off", - "no-lonely-if": "off", - "no-loop-func": "off", - "no-mixed-operators": "off", - "no-param-reassign":"off", - "no-plusplus": "off", - "no-process-exit": "off", - "no-regex-spaces": "off", - "no-restricted-globals": "off", - "no-restricted-syntax": "off", - "no-return-assign": "off", - "no-shadow": "off", - "no-underscore-dangle": "off", - "node/no-missing-import": ["error", { "tryExtensions": [".js", ".json", ".ts"] }], - "node/no-unpublished-import": "off", - "node/no-unpublished-require": "off", - "node/no-unsupported-features/es-syntax": "off", - "node/shebang": "off", - "object-curly-newline": "off", - "prefer-destructuring": "off", - "prefer-template":"off", - "promise/always-return": "off", - "promise/catch-or-return": "off", - "promise/no-nesting": "off", - "radix": "off" - } + "demo/faceid/*.js" + ] } diff --git a/CHANGELOG.md b/CHANGELOG.md index 1845cacc..61c4c10e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,11 +9,12 @@ ## Changelog +### **HEAD -> main** 2022/08/20 mandic00@live.com + + ### **2.9.4** 2022/08/20 mandic00@live.com - -### **origin/main** 2022/08/19 mandic00@live.com - +- add browser test - add tensorflow library detection - fix wasm detection - enumerate additional models diff --git a/build.js b/build.js index 19ab01f4..74976951 100644 --- a/build.js +++ b/build.js @@ -1,10 +1,10 @@ const fs = require('fs'); const path = require('path'); -const log = require('@vladmandic/pilogger'); -const Build = require('@vladmandic/build').Build; -const APIExtractor = require('@microsoft/api-extractor'); -const tf = require('@tensorflow/tfjs-node'); -const package = require('./package.json'); +const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require +const Build = require('@vladmandic/build').Build; // eslint-disable-line node/no-unpublished-require +const APIExtractor = require('@microsoft/api-extractor'); // eslint-disable-line node/no-unpublished-require +const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require +const packageJSON = require('./package.json'); const logFile = 'test/build.log'; const modelsOut = 'models/models.json'; @@ -19,7 +19,7 @@ const modelsFolders = [ '../nanodet/models', ]; 
diff --git a/build.js b/build.js
index 19ab01f4..74976951 100644
--- a/build.js
+++ b/build.js
@@ -1,10 +1,10 @@
 const fs = require('fs');
 const path = require('path');
-const log = require('@vladmandic/pilogger');
-const Build = require('@vladmandic/build').Build;
-const APIExtractor = require('@microsoft/api-extractor');
-const tf = require('@tensorflow/tfjs-node');
-const package = require('./package.json');
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require
+const Build = require('@vladmandic/build').Build; // eslint-disable-line node/no-unpublished-require
+const APIExtractor = require('@microsoft/api-extractor'); // eslint-disable-line node/no-unpublished-require
+const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
+const packageJSON = require('./package.json');

 const logFile = 'test/build.log';
 const modelsOut = 'models/models.json';
@@ -19,7 +19,7 @@ const modelsFolders = [
   '../nanodet/models',
 ];

-const apiExtractorIgnoreList = [ // eslint-disable-line @typescript-eslint/no-unused-vars
+const apiExtractorIgnoreList = [ // eslint-disable-line no-unused-vars
   'ae-missing-release-tag',
   'tsdoc-param-tag-missing-hyphen',
   'tsdoc-escape-right-brace',
@@ -73,7 +73,7 @@ async function analyzeModels() {

 async function main() {
   log.logFile(logFile);
-  log.data('Build', { name: package.name, version: package.version });
+  log.data('Build', { name: packageJSON.name, version: packageJSON.version });
   // run production build
   const build = new Build();
   await build.run('production');
diff --git a/demo/faceid/index.js b/demo/faceid/index.js
index efe3d385..4d28394c 100644
--- a/demo/faceid/index.js
+++ b/demo/faceid/index.js
@@ -4,6 +4,6 @@
  author: '
 */

-import{Human as H}from"../../dist/human.esm.js";var l,R="human",p="person",g=(...t)=>console.log("indexdb",...t);async function v(){return l?!0:new Promise(t=>{let i=indexedDB.open(R,1);i.onerror=n=>g("error:",n),i.onupgradeneeded=n=>{g("create:",n.target),l=n.target.result,l.createObjectStore(p,{keyPath:"id",autoIncrement:!0})},i.onsuccess=n=>{l=n.target.result,g("open:",l),t(!0)}})}async function x(){let t=[];return l||await v(),new Promise(i=>{let n=l.transaction([p],"readwrite").objectStore(p).openCursor(null,"next");n.onerror=s=>g("load error:",s),n.onsuccess=s=>{s.target.result?(t.push(s.target.result.value),s.target.result.continue()):i(t)}})}async function y(){return l||await v(),new Promise(t=>{let i=l.transaction([p],"readwrite").objectStore(p).count();i.onerror=n=>g("count error:",n),i.onsuccess=()=>t(i.result)})}async function C(t){l||await v();let i={name:t.name,descriptor:t.descriptor,image:t.image};l.transaction([p],"readwrite").objectStore(p).put(i),g("save:",i)}async function D(t){l||await v(),l.transaction([p],"readwrite").objectStore(p).delete(t.id),g("delete:",t)}var b={modelBasePath:"../../models",filter:{equalization:!0},face:{enabled:!0,detector:{rotation:!0,return:!0,cropFactor:1.6,mask:!1},description:{enabled:!0},iris:{enabled:!0},emotion:{enabled:!1},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!1},hand:{enabled:!1},object:{enabled:!1},gesture:{enabled:!0}},I={order:2,multiplier:25,min:.2,max:.8},c={minConfidence:.6,minSize:224,maxTime:1e4,blinkMin:10,blinkMax:800,threshold:.5,mask:b.face.detector.mask,rotation:b.face.detector.rotation,cropFactor:b.face.detector.cropFactor,...I},o={faceCount:!1,faceConfidence:!1,facingCenter:!1,lookingCenter:!1,blinkDetected:!1,faceSize:!1,antispoofCheck:!1,livenessCheck:!1,elapsedMs:0},M=()=>o.faceCount&&o.faceSize&&o.blinkDetected&&o.facingCenter&&o.lookingCenter&&o.faceConfidence&&o.antispoofCheck&&o.livenessCheck,r={face:null,record:null},u={start:0,end:0,time:0},a=new H(b);a.env.perfadd=!1;a.draw.options.font='small-caps 18px "Lato"';a.draw.options.lineHeight=20;var e={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("fps"),match:document.getElementById("match"),name:document.getElementById("name"),save:document.getElementById("save"),delete:document.getElementById("delete"),retry:document.getElementById("retry"),source:document.getElementById("source"),ok:document.getElementById("ok")},w={detect:0,draw:0},h={detect:0,draw:0},E=0,m=(...t)=>{e.log.innerText+=t.join(" ")+`
-`,console.log(...t)},k=t=>e.fps.innerText=t;async function S(){k("starting webcam...");let t={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth}}},i=await
navigator.mediaDevices.getUserMedia(t),n=new Promise(s=>{e.video.onloadeddata=()=>s(!0)});e.video.srcObject=i,e.video.play(),await n,e.canvas.width=e.video.videoWidth,e.canvas.height=e.video.videoHeight,a.env.initial&&m("video:",e.video.videoWidth,e.video.videoHeight,"|",i.getVideoTracks()[0].label),e.canvas.onclick=()=>{e.video.paused?e.video.play():e.video.pause()}}async function T(){if(!e.video.paused){r.face&&r.face.tensor&&a.tf.dispose(r.face.tensor),await a.detect(e.video);let t=a.now();h.detect=1e3/(t-w.detect),w.detect=t,requestAnimationFrame(T)}}async function L(){let t=await a.next(a.result);await a.draw.canvas(e.video,e.canvas),await a.draw.all(e.canvas,t);let i=a.now();if(h.draw=1e3/(i-w.draw),w.draw=i,k(`fps: ${h.detect.toFixed(1).padStart(5," ")} detect | ${h.draw.toFixed(1).padStart(5," ")} draw`),o.faceCount=a.result.face.length===1,o.faceCount){let s=Object.values(a.result.gesture).map(f=>f.gesture);(s.includes("blink left eye")||s.includes("blink right eye"))&&(u.start=a.now()),u.start>0&&!s.includes("blink left eye")&&!s.includes("blink right eye")&&(u.end=a.now()),o.blinkDetected=o.blinkDetected||Math.abs(u.end-u.start)>c.blinkMin&&Math.abs(u.end-u.start)c.minConfidence&&(a.result.face[0].faceScore||0)>c.minConfidence,o.antispoofCheck=(a.result.face[0].real||0)>c.minConfidence,o.livenessCheck=(a.result.face[0].live||0)>c.minConfidence,o.faceSize=a.result.face[0].box[2]>=c.minSize&&a.result.face[0].box[3]>=c.minSize}let n=32;for(let[s,f]of Object.entries(o)){let d=document.getElementById(`ok-${s}`);d||(d=document.createElement("div"),d.innerText=s,d.className="ok",d.style.top=`${n}px`,e.ok.appendChild(d)),typeof f=="boolean"?d.style.backgroundColor=f?"lightgreen":"lightcoral":d.innerText=`${s}:${f}`,n+=28}return M()||o.elapsedMs>c.maxTime?(e.video.pause(),a.result.face[0]):(o.elapsedMs=Math.trunc(a.now()-E),new Promise(s=>{setTimeout(async()=>{await L()&&s(a.result.face[0])},30)}))}async function z(){var t,i,n,s;if(e.name.value.length>0){let f=(t=e.canvas.getContext("2d"))==null?void 0:t.getImageData(0,0,e.canvas.width,e.canvas.height),d={id:0,name:e.name.value,descriptor:(i=r.face)==null?void 0:i.embedding,image:f};await C(d),m("saved face record:",d.name,"descriptor length:",(s=(n=r.face)==null?void 0:n.embedding)==null?void 0:s.length),m("known face records:",await y())}else m("invalid name")}async function P(){r.record&&r.record.id>0&&await D(r.record)}async function j(){var s,f;if((s=e.canvas.getContext("2d"))==null||s.clearRect(0,0,c.minSize,c.minSize),!r.face||!r.face.tensor||!r.face.embedding)return!1;if(console.log("face record:",r.face),a.tf.browser.toPixels(r.face.tensor,e.canvas),await y()===0)return m("face database is empty"),document.body.style.background="black",e.delete.style.display="none",!1;let t=await x(),i=t.map(d=>d.descriptor).filter(d=>d.length>0),n=await a.match(r.face.embedding,i,I);return r.record=t[n.index]||null,r.record&&(m(`best match: ${r.record.name} | id: ${r.record.id} | similarity: ${Math.round(1e3*n.similarity)/10}%`),e.name.value=r.record.name,e.source.style.display="",(f=e.source.getContext("2d"))==null||f.putImageData(r.record.image,0,0)),document.body.style.background=n.similarity>c.threshold?"darkgreen":"maroon",n.similarity>c.threshold}async function B(){var t,i,n,s;return 
o.faceCount=!1,o.faceConfidence=!1,o.facingCenter=!1,o.blinkDetected=!1,o.faceSize=!1,o.antispoofCheck=!1,o.livenessCheck=!1,o.elapsedMs=0,e.match.style.display="none",e.retry.style.display="none",e.source.style.display="none",document.body.style.background="black",await S(),await T(),E=a.now(),r.face=await L(),e.canvas.width=((i=(t=r.face)==null?void 0:t.tensor)==null?void 0:i.shape[1])||c.minSize,e.canvas.height=((s=(n=r.face)==null?void 0:n.tensor)==null?void 0:s.shape[0])||c.minSize,e.source.width=e.canvas.width,e.source.height=e.canvas.height,e.canvas.style.width="",e.match.style.display="flex",e.save.style.display="flex",e.delete.style.display="flex",e.retry.style.display="block",M()?j():(m("did not find valid face"),!1)}async function q(){var t,i,n;m("human version:",a.version,"| tfjs version:",a.tf.version["tfjs-core"]),m("face embedding model:",(t=b.face.description)!=null&&t.enabled?"faceres":"",(i=b.face.mobilefacenet)!=null&&i.enabled?"mobilefacenet":"",(n=b.face.insightface)!=null&&n.enabled?"insightface":""),m("options:",JSON.stringify(c).replace(/{|}|"|\[|\]/g,"").replace(/,/g," ")),k("loading..."),m("known face records:",await y()),await S(),await a.load(),k("initializing..."),e.retry.addEventListener("click",B),e.save.addEventListener("click",z),e.delete.addEventListener("click",P),await a.warmup(),await B()}window.onload=q; +import*as B from"../../dist/human.esm.js";var l,R="human",g="person",p=(...t)=>console.log("indexdb",...t);async function v(){return l?!0:new Promise(t=>{let o=indexedDB.open(R,1);o.onerror=s=>p("error:",s),o.onupgradeneeded=s=>{p("create:",s.target),l=s.target.result,l.createObjectStore(g,{keyPath:"id",autoIncrement:!0})},o.onsuccess=s=>{l=s.target.result,p("open:",l),t(!0)}})}async function x(){let t=[];return l||await v(),new Promise(o=>{let s=l.transaction([g],"readwrite").objectStore(g).openCursor(null,"next");s.onerror=i=>p("load error:",i),s.onsuccess=i=>{i.target.result?(t.push(i.target.result.value),i.target.result.continue()):o(t)}})}async function y(){return l||await v(),new Promise(t=>{let o=l.transaction([g],"readwrite").objectStore(g).count();o.onerror=s=>p("count error:",s),o.onsuccess=()=>t(o.result)})}async function C(t){l||await v();let o={name:t.name,descriptor:t.descriptor,image:t.image};l.transaction([g],"readwrite").objectStore(g).put(o),p("save:",o)}async function D(t){l||await v(),l.transaction([g],"readwrite").objectStore(g).delete(t.id),p("delete:",t)}var b={modelBasePath:"../../models",filter:{equalization:!0},face:{enabled:!0,detector:{rotation:!0,return:!0,cropFactor:1.6,mask:!1},description:{enabled:!0},iris:{enabled:!0},emotion:{enabled:!1},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!1},hand:{enabled:!1},object:{enabled:!1},gesture:{enabled:!0}},E={order:2,multiplier:25,min:.2,max:.8},c={minConfidence:.6,minSize:224,maxTime:1e4,blinkMin:10,blinkMax:800,threshold:.5,mask:b.face.detector.mask,rotation:b.face.detector.rotation,cropFactor:b.face.detector.cropFactor,...E},n={faceCount:!1,faceConfidence:!1,facingCenter:!1,lookingCenter:!1,blinkDetected:!1,faceSize:!1,antispoofCheck:!1,livenessCheck:!1,elapsedMs:0},I=()=>n.faceCount&&n.faceSize&&n.blinkDetected&&n.facingCenter&&n.lookingCenter&&n.faceConfidence&&n.antispoofCheck&&n.livenessCheck,r={face:null,record:null},u={start:0,end:0,time:0},a=new B.Human(b);a.env.perfadd=!1;a.draw.options.font='small-caps 18px "Lato"';a.draw.options.lineHeight=20;var 
e={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("fps"),match:document.getElementById("match"),name:document.getElementById("name"),save:document.getElementById("save"),delete:document.getElementById("delete"),retry:document.getElementById("retry"),source:document.getElementById("source"),ok:document.getElementById("ok")},w={detect:0,draw:0},h={detect:0,draw:0},S=0,m=(...t)=>{e.log.innerText+=t.join(" ")+` +`,console.log(...t)},k=t=>e.fps.innerText=t;async function H(){k("starting webcam...");let t={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth}}},o=await navigator.mediaDevices.getUserMedia(t),s=new Promise(i=>{e.video.onloadeddata=()=>i(!0)});e.video.srcObject=o,e.video.play(),await s,e.canvas.width=e.video.videoWidth,e.canvas.height=e.video.videoHeight,a.env.initial&&m("video:",e.video.videoWidth,e.video.videoHeight,"|",o.getVideoTracks()[0].label),e.canvas.onclick=()=>{e.video.paused?e.video.play():e.video.pause()}}async function T(){if(!e.video.paused){r.face&&r.face.tensor&&a.tf.dispose(r.face.tensor),await a.detect(e.video);let t=a.now();h.detect=1e3/(t-w.detect),w.detect=t,requestAnimationFrame(T)}}async function L(){let t=await a.next(a.result);await a.draw.canvas(e.video,e.canvas),await a.draw.all(e.canvas,t);let o=a.now();if(h.draw=1e3/(o-w.draw),w.draw=o,k(`fps: ${h.detect.toFixed(1).padStart(5," ")} detect | ${h.draw.toFixed(1).padStart(5," ")} draw`),n.faceCount=a.result.face.length===1,n.faceCount){let i=Object.values(a.result.gesture).map(f=>f.gesture);(i.includes("blink left eye")||i.includes("blink right eye"))&&(u.start=a.now()),u.start>0&&!i.includes("blink left eye")&&!i.includes("blink right eye")&&(u.end=a.now()),n.blinkDetected=n.blinkDetected||Math.abs(u.end-u.start)>c.blinkMin&&Math.abs(u.end-u.start)c.minConfidence&&(a.result.face[0].faceScore||0)>c.minConfidence,n.antispoofCheck=(a.result.face[0].real||0)>c.minConfidence,n.livenessCheck=(a.result.face[0].live||0)>c.minConfidence,n.faceSize=a.result.face[0].box[2]>=c.minSize&&a.result.face[0].box[3]>=c.minSize}let s=32;for(let[i,f]of Object.entries(n)){let d=document.getElementById(`ok-${i}`);d||(d=document.createElement("div"),d.innerText=i,d.className="ok",d.style.top=`${s}px`,e.ok.appendChild(d)),typeof f=="boolean"?d.style.backgroundColor=f?"lightgreen":"lightcoral":d.innerText=`${i}:${f}`,s+=28}return I()||n.elapsedMs>c.maxTime?(e.video.pause(),a.result.face[0]):(n.elapsedMs=Math.trunc(a.now()-S),new Promise(i=>{setTimeout(async()=>{await L()&&i(a.result.face[0])},30)}))}async function z(){var t,o,s,i;if(e.name.value.length>0){let f=(t=e.canvas.getContext("2d"))==null?void 0:t.getImageData(0,0,e.canvas.width,e.canvas.height),d={id:0,name:e.name.value,descriptor:(o=r.face)==null?void 0:o.embedding,image:f};await C(d),m("saved face record:",d.name,"descriptor length:",(i=(s=r.face)==null?void 0:s.embedding)==null?void 0:i.length),m("known face records:",await y())}else m("invalid name")}async function P(){r.record&&r.record.id>0&&await D(r.record)}async function j(){var i,f;if((i=e.canvas.getContext("2d"))==null||i.clearRect(0,0,c.minSize,c.minSize),!r.face||!r.face.tensor||!r.face.embedding)return!1;if(console.log("face record:",r.face),a.tf.browser.toPixels(r.face.tensor,e.canvas),await y()===0)return m("face database is empty"),document.body.style.background="black",e.delete.style.display="none",!1;let t=await x(),o=t.map(d=>d.descriptor).filter(d=>d.length>0),s=await 
a.match(r.face.embedding,o,E);return r.record=t[s.index]||null,r.record&&(m(`best match: ${r.record.name} | id: ${r.record.id} | similarity: ${Math.round(1e3*s.similarity)/10}%`),e.name.value=r.record.name,e.source.style.display="",(f=e.source.getContext("2d"))==null||f.putImageData(r.record.image,0,0)),document.body.style.background=s.similarity>c.threshold?"darkgreen":"maroon",s.similarity>c.threshold}async function M(){var t,o;return n.faceCount=!1,n.faceConfidence=!1,n.facingCenter=!1,n.blinkDetected=!1,n.faceSize=!1,n.antispoofCheck=!1,n.livenessCheck=!1,n.elapsedMs=0,e.match.style.display="none",e.retry.style.display="none",e.source.style.display="none",document.body.style.background="black",await H(),await T(),S=a.now(),r.face=await L(),e.canvas.width=((t=r.face.tensor)==null?void 0:t.shape[1])||c.minSize,e.canvas.height=((o=r.face.tensor)==null?void 0:o.shape[0])||c.minSize,e.source.width=e.canvas.width,e.source.height=e.canvas.height,e.canvas.style.width="",e.match.style.display="flex",e.save.style.display="flex",e.delete.style.display="flex",e.retry.style.display="block",I()?j():(m("did not find valid face"),!1)}async function q(){var t,o;m("human version:",a.version,"| tfjs version:",a.tf.version["tfjs-core"]),m("face embedding model:",b.face.description.enabled?"faceres":"",(t=b.face.mobilefacenet)!=null&&t.enabled?"mobilefacenet":"",(o=b.face.insightface)!=null&&o.enabled?"insightface":""),m("options:",JSON.stringify(c).replace(/{|}|"|\[|\]/g,"").replace(/,/g," ")),k("loading..."),m("known face records:",await y()),await H(),await a.load(),k("initializing..."),e.retry.addEventListener("click",M),e.save.addEventListener("click",z),e.delete.addEventListener("click",P),await a.warmup(),await M()}window.onload=q; //# sourceMappingURL=index.js.map diff --git a/demo/faceid/index.js.map b/demo/faceid/index.js.map index 708d5f43..7f6023f4 100644 --- a/demo/faceid/index.js.map +++ b/demo/faceid/index.js.map @@ -1,7 +1,7 @@ { "version": 3, "sources": ["index.ts", "indexdb.ts"], - "sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary \n * @author \n * @copyright \n * @license MIT\n */\n\nimport { Human, TensorLike, FaceResult } from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human\nimport * as indexDb from './indexdb'; // methods to deal with indexdb\n\nconst humanConfig = { // user configuration for human, used to fine-tune behavior\n modelBasePath: '../../models',\n filter: { equalization: true }, // lets run with histogram equilizer\n face: {\n enabled: true,\n detector: { rotation: true, return: true, cropFactor: 1.6, mask: false }, // return tensor is used to get detected face image\n description: { enabled: true }, // default model for face descriptor extraction is faceres\n // mobilefacenet: { enabled: true, modelPath: 'https://vladmandic.github.io/human-models/models/mobilefacenet.json' }, // alternative model\n // insightface: { enabled: true, modelPath: 'https://vladmandic.github.io/insightface/models/insightface-mobilenet-swish.json' }, // alternative model\n iris: { enabled: true }, // needed to determine gaze direction\n emotion: { enabled: false }, // not needed\n antispoof: { enabled: true }, // enable optional antispoof module\n liveness: { enabled: true }, // enable optional liveness module\n },\n body: { enabled: false },\n hand: { enabled: false },\n object: { enabled: false },\n gesture: { enabled: true }, // parses face and iris gestures\n};\n\n// const matchOptions = { order: 2, multiplier: 1000, min: 0.0, max: 1.0 
}; // for embedding model\nconst matchOptions = { order: 2, multiplier: 25, min: 0.2, max: 0.8 }; // for faceres model\n\nconst options = {\n minConfidence: 0.6, // overal face confidence for box, face, gender, real, live\n minSize: 224, // min input to face descriptor model before degradation\n maxTime: 10000, // max time before giving up\n blinkMin: 10, // minimum duration of a valid blink\n blinkMax: 800, // maximum duration of a valid blink\n threshold: 0.5, // minimum similarity\n mask: humanConfig.face.detector.mask,\n rotation: humanConfig.face.detector.rotation,\n cropFactor: humanConfig.face.detector.cropFactor,\n ...matchOptions,\n};\n\nconst ok = { // must meet all rules\n faceCount: false,\n faceConfidence: false,\n facingCenter: false,\n lookingCenter: false,\n blinkDetected: false,\n faceSize: false,\n antispoofCheck: false,\n livenessCheck: false,\n elapsedMs: 0, // total time while waiting for valid face\n};\nconst allOk = () => ok.faceCount && ok.faceSize && ok.blinkDetected && ok.facingCenter && ok.lookingCenter && ok.faceConfidence && ok.antispoofCheck && ok.livenessCheck;\nconst current: { face: FaceResult | null, record: indexDb.FaceRecord | null } = { face: null, record: null }; // current face record and matched database record\n\nconst blink = { // internal timers for blink start/end/duration\n start: 0,\n end: 0,\n time: 0,\n};\n\n// let db: Array<{ name: string, source: string, embedding: number[] }> = []; // holds loaded face descriptor database\nconst human = new Human(humanConfig); // create instance of human with overrides from user configuration\n\nhuman.env['perfadd'] = false; // is performance data showing instant or total values\nhuman.draw.options.font = 'small-caps 18px \"Lato\"'; // set font used to draw labels when using draw methods\nhuman.draw.options.lineHeight = 20;\n\nconst dom = { // grab instances of dom objects so we dont have to look them up later\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('fps') as HTMLPreElement,\n match: document.getElementById('match') as HTMLDivElement,\n name: document.getElementById('name') as HTMLInputElement,\n save: document.getElementById('save') as HTMLSpanElement,\n delete: document.getElementById('delete') as HTMLSpanElement,\n retry: document.getElementById('retry') as HTMLDivElement,\n source: document.getElementById('source') as HTMLCanvasElement,\n ok: document.getElementById('ok') as HTMLDivElement,\n};\nconst timestamp = { detect: 0, draw: 0 }; // holds information used to calculate performance and possible memory leaks\nconst fps = { detect: 0, draw: 0 }; // holds calculated fps information for both detect and screen refresh\nlet startTime = 0;\n\nconst log = (...msg) => { // helper method to output messages\n dom.log.innerText += msg.join(' ') + '\\n';\n // eslint-disable-next-line no-console\n console.log(...msg);\n};\nconst printFPS = (msg) => dom.fps.innerText = msg; // print status element\n\nasync function webCam() { // initialize webcam\n printFPS('starting webcam...');\n // @ts-ignore resizeMode is not yet defined in tslib\n const cameraOptions: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } };\n const stream: MediaStream = await navigator.mediaDevices.getUserMedia(cameraOptions);\n const ready = new Promise((resolve) => { 
dom.video.onloadeddata = () => resolve(true); });\n dom.video.srcObject = stream;\n dom.video.play();\n await ready;\n dom.canvas.width = dom.video.videoWidth;\n dom.canvas.height = dom.video.videoHeight;\n if (human.env.initial) log('video:', dom.video.videoWidth, dom.video.videoHeight, '|', stream.getVideoTracks()[0].label);\n dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click\n if (dom.video.paused) dom.video.play();\n else dom.video.pause();\n };\n}\n\nasync function detectionLoop() { // main detection loop\n if (!dom.video.paused) {\n if (current.face && current.face.tensor) human.tf.dispose(current.face.tensor); // dispose previous tensor\n await human.detect(dom.video); // actual detection; were not capturing output in a local variable as it can also be reached via human.result\n const now = human.now();\n fps.detect = 1000 / (now - timestamp.detect);\n timestamp.detect = now;\n requestAnimationFrame(detectionLoop); // start new frame immediately\n }\n}\n\nasync function validationLoop(): Promise { // main screen refresh loop\n const interpolated = await human.next(human.result); // smoothen result using last-known results\n await human.draw.canvas(dom.video, dom.canvas); // draw canvas to screen\n await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.\n const now = human.now();\n fps.draw = 1000 / (now - timestamp.draw);\n timestamp.draw = now;\n printFPS(`fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect | ${fps.draw.toFixed(1).padStart(5, ' ')} draw`); // write status\n ok.faceCount = human.result.face.length === 1; // must be exactly detected face\n if (ok.faceCount) { // skip the rest if no face\n const gestures: string[] = Object.values(human.result.gesture).map((gesture) => gesture.gesture); // flatten all gestures\n if (gestures.includes('blink left eye') || gestures.includes('blink right eye')) blink.start = human.now(); // blink starts when eyes get closed\n if (blink.start > 0 && !gestures.includes('blink left eye') && !gestures.includes('blink right eye')) blink.end = human.now(); // if blink started how long until eyes are back open\n ok.blinkDetected = ok.blinkDetected || (Math.abs(blink.end - blink.start) > options.blinkMin && Math.abs(blink.end - blink.start) < options.blinkMax);\n if (ok.blinkDetected && blink.time === 0) blink.time = Math.trunc(blink.end - blink.start);\n ok.facingCenter = gestures.includes('facing center');\n ok.lookingCenter = gestures.includes('looking center'); // must face camera and look at camera\n ok.faceConfidence = (human.result.face[0].boxScore || 0) > options.minConfidence && (human.result.face[0].faceScore || 0) > options.minConfidence;\n ok.antispoofCheck = (human.result.face[0].real || 0) > options.minConfidence;\n ok.livenessCheck = (human.result.face[0].live || 0) > options.minConfidence;\n ok.faceSize = human.result.face[0].box[2] >= options.minSize && human.result.face[0].box[3] >= options.minSize;\n }\n let y = 32;\n for (const [key, val] of Object.entries(ok)) {\n let el = document.getElementById(`ok-${key}`);\n if (!el) {\n el = document.createElement('div');\n el.innerText = key;\n el.className = 'ok';\n el.style.top = `${y}px`;\n dom.ok.appendChild(el);\n }\n if (typeof val === 'boolean') el.style.backgroundColor = val ? 
'lightgreen' : 'lightcoral';\n else el.innerText = `${key}:${val}`;\n y += 28;\n }\n if (allOk()) { // all criteria met\n dom.video.pause();\n return human.result.face[0];\n }\n if (ok.elapsedMs > options.maxTime) { // give up\n dom.video.pause();\n return human.result.face[0];\n } else { // run again\n ok.elapsedMs = Math.trunc(human.now() - startTime);\n return new Promise((resolve) => {\n setTimeout(async () => {\n const res = await validationLoop(); // run validation loop until conditions are met\n if (res) resolve(human.result.face[0]); // recursive promise resolve\n }, 30); // use to slow down refresh from max refresh rate to target of 30 fps\n });\n }\n}\n\nasync function saveRecords() {\n if (dom.name.value.length > 0) {\n const image = dom.canvas.getContext('2d')?.getImageData(0, 0, dom.canvas.width, dom.canvas.height) as ImageData;\n const rec = { id: 0, name: dom.name.value, descriptor: current.face?.embedding as number[], image };\n await indexDb.save(rec);\n log('saved face record:', rec.name, 'descriptor length:', current.face?.embedding?.length);\n log('known face records:', await indexDb.count());\n } else {\n log('invalid name');\n }\n}\n\nasync function deleteRecord() {\n if (current.record && current.record.id > 0) {\n await indexDb.remove(current.record);\n }\n}\n\nasync function detectFace() {\n dom.canvas.getContext('2d')?.clearRect(0, 0, options.minSize, options.minSize);\n if (!current.face || !current.face.tensor || !current.face.embedding) return false;\n // eslint-disable-next-line no-console\n console.log('face record:', current.face);\n human.tf.browser.toPixels(current.face.tensor as unknown as TensorLike, dom.canvas);\n if (await indexDb.count() === 0) {\n log('face database is empty');\n document.body.style.background = 'black';\n dom.delete.style.display = 'none';\n return false;\n }\n const db = await indexDb.load();\n const descriptors = db.map((rec) => rec.descriptor).filter((desc) => desc.length > 0);\n const res = await human.match(current.face.embedding, descriptors, matchOptions);\n current.record = db[res.index] || null;\n if (current.record) {\n log(`best match: ${current.record.name} | id: ${current.record.id} | similarity: ${Math.round(1000 * res.similarity) / 10}%`);\n dom.name.value = current.record.name;\n dom.source.style.display = '';\n dom.source.getContext('2d')?.putImageData(current.record.image, 0, 0);\n }\n document.body.style.background = res.similarity > options.threshold ? 
'darkgreen' : 'maroon';\n return res.similarity > options.threshold;\n}\n\nasync function main() { // main entry point\n ok.faceCount = false;\n ok.faceConfidence = false;\n ok.facingCenter = false;\n ok.blinkDetected = false;\n ok.faceSize = false;\n ok.antispoofCheck = false;\n ok.livenessCheck = false;\n ok.elapsedMs = 0;\n dom.match.style.display = 'none';\n dom.retry.style.display = 'none';\n dom.source.style.display = 'none';\n document.body.style.background = 'black';\n await webCam();\n await detectionLoop(); // start detection loop\n startTime = human.now();\n current.face = await validationLoop(); // start validation loop\n dom.canvas.width = current.face?.tensor?.shape[1] || options.minSize;\n dom.canvas.height = current.face?.tensor?.shape[0] || options.minSize;\n dom.source.width = dom.canvas.width;\n dom.source.height = dom.canvas.height;\n dom.canvas.style.width = '';\n dom.match.style.display = 'flex';\n dom.save.style.display = 'flex';\n dom.delete.style.display = 'flex';\n dom.retry.style.display = 'block';\n if (!allOk()) { // is all criteria met?\n log('did not find valid face');\n return false;\n } else {\n return detectFace();\n }\n}\n\nasync function init() {\n log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);\n log('face embedding model:', humanConfig.face['description']?.enabled ? 'faceres' : '', humanConfig.face['mobilefacenet']?.enabled ? 'mobilefacenet' : '', humanConfig.face['insightface']?.enabled ? 'insightface' : '');\n log('options:', JSON.stringify(options).replace(/{|}|\"|\\[|\\]/g, '').replace(/,/g, ' '));\n printFPS('loading...');\n log('known face records:', await indexDb.count());\n await webCam(); // start webcam\n await human.load(); // preload all models\n printFPS('initializing...');\n dom.retry.addEventListener('click', main);\n dom.save.addEventListener('click', saveRecords);\n dom.delete.addEventListener('click', deleteRecord);\n await human.warmup(); // warmup function to initialize backend for future faster detection\n await main();\n}\n\nwindow.onload = init;\n", "let db: IDBDatabase; // instance of indexdb\n\nconst database = 'human';\nconst table = 'person';\n\nexport type FaceRecord = { id: number, name: string, descriptor: number[], image: ImageData };\n\n// eslint-disable-next-line no-console\nconst log = (...msg) => console.log('indexdb', ...msg);\n\nexport async function open() {\n if (db) return true;\n return new Promise((resolve) => {\n const request: IDBOpenDBRequest = indexedDB.open(database, 1);\n request.onerror = (evt) => log('error:', evt);\n request.onupgradeneeded = (evt: IDBVersionChangeEvent) => { // create if doesnt exist\n log('create:', evt.target);\n db = (evt.target as IDBOpenDBRequest).result;\n db.createObjectStore(table, { keyPath: 'id', autoIncrement: true });\n };\n request.onsuccess = (evt) => { // open\n db = (evt.target as IDBOpenDBRequest).result as IDBDatabase;\n log('open:', db);\n resolve(true);\n };\n });\n}\n\nexport async function load(): Promise {\n const faceDB: Array = [];\n if (!db) await open(); // open or create if not already done\n return new Promise((resolve) => {\n const cursor: IDBRequest = db.transaction([table], 'readwrite').objectStore(table).openCursor(null, 'next');\n cursor.onerror = (evt) => log('load error:', evt);\n cursor.onsuccess = (evt) => {\n if ((evt.target as IDBRequest).result) {\n faceDB.push((evt.target as IDBRequest).result.value);\n (evt.target as IDBRequest).result.continue();\n } else {\n resolve(faceDB);\n }\n };\n 
});\n}\n\nexport async function count(): Promise {\n if (!db) await open(); // open or create if not already done\n return new Promise((resolve) => {\n const store: IDBRequest = db.transaction([table], 'readwrite').objectStore(table).count();\n store.onerror = (evt) => log('count error:', evt);\n store.onsuccess = () => resolve(store.result);\n });\n}\n\nexport async function save(faceRecord: FaceRecord) {\n if (!db) await open(); // open or create if not already done\n const newRecord = { name: faceRecord.name, descriptor: faceRecord.descriptor, image: faceRecord.image }; // omit id as its autoincrement\n db.transaction([table], 'readwrite').objectStore(table).put(newRecord);\n log('save:', newRecord);\n}\n\nexport async function remove(faceRecord: FaceRecord) {\n if (!db) await open(); // open or create if not already done\n db.transaction([table], 'readwrite').objectStore(table).delete(faceRecord.id); // delete based on id\n log('delete:', faceRecord);\n}\n"], - "mappings": ";;;;;;AASA,OAAS,SAAAA,MAAqC,0BCT9C,IAAIC,EAEEC,EAAW,QACXC,EAAQ,SAKRC,EAAM,IAAIC,IAAQ,QAAQ,IAAI,UAAW,GAAGA,CAAG,EAErD,eAAsBC,GAAO,CAC3B,OAAIL,EAAW,GACR,IAAI,QAASM,GAAY,CAC9B,IAAMC,EAA4B,UAAU,KAAKN,EAAU,CAAC,EAC5DM,EAAQ,QAAWC,GAAQL,EAAI,SAAUK,CAAG,EAC5CD,EAAQ,gBAAmBC,GAA+B,CACxDL,EAAI,UAAWK,EAAI,MAAM,EACzBR,EAAMQ,EAAI,OAA4B,OACtCR,EAAG,kBAAkBE,EAAO,CAAE,QAAS,KAAM,cAAe,EAAK,CAAC,CACpE,EACAK,EAAQ,UAAaC,GAAQ,CAC3BR,EAAMQ,EAAI,OAA4B,OACtCL,EAAI,QAASH,CAAE,EACfM,EAAQ,EAAI,CACd,CACF,CAAC,CACH,CAEA,eAAsBG,GAA8B,CAClD,IAAMC,EAA4B,CAAC,EACnC,OAAKV,GAAI,MAAMK,EAAK,EACb,IAAI,QAASC,GAAY,CAC9B,IAAMK,EAAqBX,EAAG,YAAY,CAACE,CAAK,EAAG,WAAW,EAAE,YAAYA,CAAK,EAAE,WAAW,KAAM,MAAM,EAC1GS,EAAO,QAAWH,GAAQL,EAAI,cAAeK,CAAG,EAChDG,EAAO,UAAaH,GAAQ,CACrBA,EAAI,OAAsB,QAC7BE,EAAO,KAAMF,EAAI,OAAsB,OAAO,KAAK,EAClDA,EAAI,OAAsB,OAAO,SAAS,GAE3CF,EAAQI,CAAM,CAElB,CACF,CAAC,CACH,CAEA,eAAsBE,GAAyB,CAC7C,OAAKZ,GAAI,MAAMK,EAAK,EACb,IAAI,QAASC,GAAY,CAC9B,IAAMO,EAAoBb,EAAG,YAAY,CAACE,CAAK,EAAG,WAAW,EAAE,YAAYA,CAAK,EAAE,MAAM,EACxFW,EAAM,QAAWL,GAAQL,EAAI,eAAgBK,CAAG,EAChDK,EAAM,UAAY,IAAMP,EAAQO,EAAM,MAAM,CAC9C,CAAC,CACH,CAEA,eAAsBC,EAAKC,EAAwB,CAC5Cf,GAAI,MAAMK,EAAK,EACpB,IAAMW,EAAY,CAAE,KAAMD,EAAW,KAAM,WAAYA,EAAW,WAAY,MAAOA,EAAW,KAAM,EACtGf,EAAG,YAAY,CAACE,CAAK,EAAG,WAAW,EAAE,YAAYA,CAAK,EAAE,IAAIc,CAAS,EACrEb,EAAI,QAASa,CAAS,CACxB,CAEA,eAAsBC,EAAOF,EAAwB,CAC9Cf,GAAI,MAAMK,EAAK,EACpBL,EAAG,YAAY,CAACE,CAAK,EAAG,WAAW,EAAE,YAAYA,CAAK,EAAE,OAAOa,EAAW,EAAE,EAC5EZ,EAAI,UAAWY,CAAU,CAC3B,CDrDA,IAAMG,EAAc,CAClB,cAAe,eACf,OAAQ,CAAE,aAAc,EAAK,EAC7B,KAAM,CACJ,QAAS,GACT,SAAU,CAAE,SAAU,GAAM,OAAQ,GAAM,WAAY,IAAK,KAAM,EAAM,EACvE,YAAa,CAAE,QAAS,EAAK,EAG7B,KAAM,CAAE,QAAS,EAAK,EACtB,QAAS,CAAE,QAAS,EAAM,EAC1B,UAAW,CAAE,QAAS,EAAK,EAC3B,SAAU,CAAE,QAAS,EAAK,CAC5B,EACA,KAAM,CAAE,QAAS,EAAM,EACvB,KAAM,CAAE,QAAS,EAAM,EACvB,OAAQ,CAAE,QAAS,EAAM,EACzB,QAAS,CAAE,QAAS,EAAK,CAC3B,EAGMC,EAAe,CAAE,MAAO,EAAG,WAAY,GAAI,IAAK,GAAK,IAAK,EAAI,EAE9DC,EAAU,CACd,cAAe,GACf,QAAS,IACT,QAAS,IACT,SAAU,GACV,SAAU,IACV,UAAW,GACX,KAAMF,EAAY,KAAK,SAAS,KAChC,SAAUA,EAAY,KAAK,SAAS,SACpC,WAAYA,EAAY,KAAK,SAAS,WACtC,GAAGC,CACL,EAEME,EAAK,CACT,UAAW,GACX,eAAgB,GAChB,aAAc,GACd,cAAe,GACf,cAAe,GACf,SAAU,GACV,eAAgB,GAChB,cAAe,GACf,UAAW,CACb,EACMC,EAAQ,IAAMD,EAAG,WAAaA,EAAG,UAAYA,EAAG,eAAiBA,EAAG,cAAgBA,EAAG,eAAiBA,EAAG,gBAAkBA,EAAG,gBAAkBA,EAAG,cACrJE,EAA0E,CAAE,KAAM,KAAM,OAAQ,IAAK,EAErGC,EAAQ,CACZ,MAAO,EACP,IAAK,EACL,KAAM,CACR,EAGMC,EAAQ,IAAIC,EAAMR,CAAW,EAEnCO,EAAM,IAAI,QAAa,GACvBA,EAAM,KAAK,QAAQ,KAAO,yBAC1BA,EAAM,KAAK,QAAQ,WAAa,GAEhC,IAAME,EAAM,CACV,MAAO,SAAS,eAAe,OAAO,EACtC,OAAQ,SAAS,eAAe,QAAQ,EACxC,IAAK,SAAS,eAAe,KAAK,EACl
C,IAAK,SAAS,eAAe,KAAK,EAClC,MAAO,SAAS,eAAe,OAAO,EACtC,KAAM,SAAS,eAAe,MAAM,EACpC,KAAM,SAAS,eAAe,MAAM,EACpC,OAAQ,SAAS,eAAe,QAAQ,EACxC,MAAO,SAAS,eAAe,OAAO,EACtC,OAAQ,SAAS,eAAe,QAAQ,EACxC,GAAI,SAAS,eAAe,IAAI,CAClC,EACMC,EAAY,CAAE,OAAQ,EAAG,KAAM,CAAE,EACjCC,EAAM,CAAE,OAAQ,EAAG,KAAM,CAAE,EAC7BC,EAAY,EAEVC,EAAM,IAAIC,IAAQ,CACtBL,EAAI,IAAI,WAAaK,EAAI,KAAK,GAAG,EAAI;AAAA,EAErC,QAAQ,IAAI,GAAGA,CAAG,CACpB,EACMC,EAAYD,GAAQL,EAAI,IAAI,UAAYK,EAE9C,eAAeE,GAAS,CACtBD,EAAS,oBAAoB,EAE7B,IAAME,EAAwC,CAAE,MAAO,GAAO,MAAO,CAAE,WAAY,OAAQ,WAAY,OAAQ,MAAO,CAAE,MAAO,SAAS,KAAK,WAAY,CAAE,CAAE,EACvJC,EAAsB,MAAM,UAAU,aAAa,aAAaD,CAAa,EAC7EE,EAAQ,IAAI,QAASC,GAAY,CAAEX,EAAI,MAAM,aAAe,IAAMW,EAAQ,EAAI,CAAG,CAAC,EACxFX,EAAI,MAAM,UAAYS,EACtBT,EAAI,MAAM,KAAK,EACf,MAAMU,EACNV,EAAI,OAAO,MAAQA,EAAI,MAAM,WAC7BA,EAAI,OAAO,OAASA,EAAI,MAAM,YAC1BF,EAAM,IAAI,SAASM,EAAI,SAAUJ,EAAI,MAAM,WAAYA,EAAI,MAAM,YAAa,IAAKS,EAAO,eAAe,EAAE,GAAG,KAAK,EACvHT,EAAI,OAAO,QAAU,IAAM,CACrBA,EAAI,MAAM,OAAQA,EAAI,MAAM,KAAK,EAChCA,EAAI,MAAM,MAAM,CACvB,CACF,CAEA,eAAeY,GAAgB,CAC7B,GAAI,CAACZ,EAAI,MAAM,OAAQ,CACjBJ,EAAQ,MAAQA,EAAQ,KAAK,QAAQE,EAAM,GAAG,QAAQF,EAAQ,KAAK,MAAM,EAC7E,MAAME,EAAM,OAAOE,EAAI,KAAK,EAC5B,IAAMa,EAAMf,EAAM,IAAI,EACtBI,EAAI,OAAS,KAAQW,EAAMZ,EAAU,QACrCA,EAAU,OAASY,EACnB,sBAAsBD,CAAa,CACrC,CACF,CAEA,eAAeE,GAAsC,CACnD,IAAMC,EAAe,MAAMjB,EAAM,KAAKA,EAAM,MAAM,EAClD,MAAMA,EAAM,KAAK,OAAOE,EAAI,MAAOA,EAAI,MAAM,EAC7C,MAAMF,EAAM,KAAK,IAAIE,EAAI,OAAQe,CAAY,EAC7C,IAAMF,EAAMf,EAAM,IAAI,EAKtB,GAJAI,EAAI,KAAO,KAAQW,EAAMZ,EAAU,MACnCA,EAAU,KAAOY,EACjBP,EAAS,QAAQJ,EAAI,OAAO,QAAQ,CAAC,EAAE,SAAS,EAAG,GAAG,cAAcA,EAAI,KAAK,QAAQ,CAAC,EAAE,SAAS,EAAG,GAAG,QAAQ,EAC/GR,EAAG,UAAYI,EAAM,OAAO,KAAK,SAAW,EACxCJ,EAAG,UAAW,CAChB,IAAMsB,EAAqB,OAAO,OAAOlB,EAAM,OAAO,OAAO,EAAE,IAAKmB,GAAYA,EAAQ,OAAO,GAC3FD,EAAS,SAAS,gBAAgB,GAAKA,EAAS,SAAS,iBAAiB,KAAGnB,EAAM,MAAQC,EAAM,IAAI,GACrGD,EAAM,MAAQ,GAAK,CAACmB,EAAS,SAAS,gBAAgB,GAAK,CAACA,EAAS,SAAS,iBAAiB,IAAGnB,EAAM,IAAMC,EAAM,IAAI,GAC5HJ,EAAG,cAAgBA,EAAG,eAAkB,KAAK,IAAIG,EAAM,IAAMA,EAAM,KAAK,EAAIJ,EAAQ,UAAY,KAAK,IAAII,EAAM,IAAMA,EAAM,KAAK,EAAIJ,EAAQ,SACxIC,EAAG,eAAiBG,EAAM,OAAS,IAAGA,EAAM,KAAO,KAAK,MAAMA,EAAM,IAAMA,EAAM,KAAK,GACzFH,EAAG,aAAesB,EAAS,SAAS,eAAe,EACnDtB,EAAG,cAAgBsB,EAAS,SAAS,gBAAgB,EACrDtB,EAAG,gBAAkBI,EAAM,OAAO,KAAK,GAAG,UAAY,GAAKL,EAAQ,gBAAkBK,EAAM,OAAO,KAAK,GAAG,WAAa,GAAKL,EAAQ,cACpIC,EAAG,gBAAkBI,EAAM,OAAO,KAAK,GAAG,MAAQ,GAAKL,EAAQ,cAC/DC,EAAG,eAAiBI,EAAM,OAAO,KAAK,GAAG,MAAQ,GAAKL,EAAQ,cAC9DC,EAAG,SAAWI,EAAM,OAAO,KAAK,GAAG,IAAI,IAAML,EAAQ,SAAWK,EAAM,OAAO,KAAK,GAAG,IAAI,IAAML,EAAQ,OACzG,CACA,IAAIyB,EAAI,GACR,OAAW,CAACC,EAAKC,CAAG,IAAK,OAAO,QAAQ1B,CAAE,EAAG,CAC3C,IAAI2B,EAAK,SAAS,eAAe,MAAMF,GAAK,EACvCE,IACHA,EAAK,SAAS,cAAc,KAAK,EACjCA,EAAG,UAAYF,EACfE,EAAG,UAAY,KACfA,EAAG,MAAM,IAAM,GAAGH,MAClBlB,EAAI,GAAG,YAAYqB,CAAE,GAEnB,OAAOD,GAAQ,UAAWC,EAAG,MAAM,gBAAkBD,EAAM,aAAe,aACzEC,EAAG,UAAY,GAAGF,KAAOC,IAC9BF,GAAK,EACP,CAKA,OAJIvB,EAAM,GAIND,EAAG,UAAYD,EAAQ,SACzBO,EAAI,MAAM,MAAM,EACTF,EAAM,OAAO,KAAK,KAEzBJ,EAAG,UAAY,KAAK,MAAMI,EAAM,IAAI,EAAIK,CAAS,EAC1C,IAAI,QAASQ,GAAY,CAC9B,WAAW,SAAY,CACT,MAAMG,EAAe,GACxBH,EAAQb,EAAM,OAAO,KAAK,EAAE,CACvC,EAAG,EAAE,CACP,CAAC,EAEL,CAEA,eAAewB,GAAc,CAtL7B,IAAAC,EAAAC,EAAAC,EAAAC,EAuLE,GAAI1B,EAAI,KAAK,MAAM,OAAS,EAAG,CAC7B,IAAM2B,GAAQJ,EAAAvB,EAAI,OAAO,WAAW,IAAI,IAA1B,YAAAuB,EAA6B,aAAa,EAAG,EAAGvB,EAAI,OAAO,MAAOA,EAAI,OAAO,QACrF4B,EAAM,CAAE,GAAI,EAAG,KAAM5B,EAAI,KAAK,MAAO,YAAYwB,EAAA5B,EAAQ,OAAR,YAAA4B,EAAc,UAAuB,MAAAG,CAAM,EAClG,MAAcE,EAAKD,CAAG,EACtBxB,EAAI,qBAAsBwB,EAAI,KAAM,sBAAsBF,GAAAD,EAAA7B,EAAQ,OAAR,YAAA6B,EAAc,YAAd,YAAAC,EAAyB,MAAM,EACzFtB,EAAI,sBAAuB,MAAc0B,EAAM,CAAC,CAClD,MACE1B,EAAI,cAAc,CA
EtB,CAEA,eAAe2B,GAAe,CACxBnC,EAAQ,QAAUA,EAAQ,OAAO,GAAK,GACxC,MAAcoC,EAAOpC,EAAQ,MAAM,CAEvC,CAEA,eAAeqC,GAAa,CAxM5B,IAAAV,EAAAC,EA0ME,IADAD,EAAAvB,EAAI,OAAO,WAAW,IAAI,IAA1B,MAAAuB,EAA6B,UAAU,EAAG,EAAG9B,EAAQ,QAASA,EAAQ,SAClE,CAACG,EAAQ,MAAQ,CAACA,EAAQ,KAAK,QAAU,CAACA,EAAQ,KAAK,UAAW,MAAO,GAI7E,GAFA,QAAQ,IAAI,eAAgBA,EAAQ,IAAI,EACxCE,EAAM,GAAG,QAAQ,SAASF,EAAQ,KAAK,OAAiCI,EAAI,MAAM,EAC9E,MAAc8B,EAAM,IAAM,EAC5B,OAAA1B,EAAI,wBAAwB,EAC5B,SAAS,KAAK,MAAM,WAAa,QACjCJ,EAAI,OAAO,MAAM,QAAU,OACpB,GAET,IAAMkC,EAAK,MAAcC,EAAK,EACxBC,EAAcF,EAAG,IAAKN,GAAQA,EAAI,UAAU,EAAE,OAAQS,GAASA,EAAK,OAAS,CAAC,EAC9EC,EAAM,MAAMxC,EAAM,MAAMF,EAAQ,KAAK,UAAWwC,EAAa5C,CAAY,EAC/E,OAAAI,EAAQ,OAASsC,EAAGI,EAAI,QAAU,KAC9B1C,EAAQ,SACVQ,EAAI,eAAeR,EAAQ,OAAO,cAAcA,EAAQ,OAAO,oBAAoB,KAAK,MAAM,IAAO0C,EAAI,UAAU,EAAI,KAAK,EAC5HtC,EAAI,KAAK,MAAQJ,EAAQ,OAAO,KAChCI,EAAI,OAAO,MAAM,QAAU,IAC3BwB,EAAAxB,EAAI,OAAO,WAAW,IAAI,IAA1B,MAAAwB,EAA6B,aAAa5B,EAAQ,OAAO,MAAO,EAAG,IAErE,SAAS,KAAK,MAAM,WAAa0C,EAAI,WAAa7C,EAAQ,UAAY,YAAc,SAC7E6C,EAAI,WAAa7C,EAAQ,SAClC,CAEA,eAAe8C,GAAO,CAlOtB,IAAAhB,EAAAC,EAAAC,EAAAC,EA4PE,OAzBAhC,EAAG,UAAY,GACfA,EAAG,eAAiB,GACpBA,EAAG,aAAe,GAClBA,EAAG,cAAgB,GACnBA,EAAG,SAAW,GACdA,EAAG,eAAiB,GACpBA,EAAG,cAAgB,GACnBA,EAAG,UAAY,EACfM,EAAI,MAAM,MAAM,QAAU,OAC1BA,EAAI,MAAM,MAAM,QAAU,OAC1BA,EAAI,OAAO,MAAM,QAAU,OAC3B,SAAS,KAAK,MAAM,WAAa,QACjC,MAAMO,EAAO,EACb,MAAMK,EAAc,EACpBT,EAAYL,EAAM,IAAI,EACtBF,EAAQ,KAAO,MAAMkB,EAAe,EACpCd,EAAI,OAAO,QAAQwB,GAAAD,EAAA3B,EAAQ,OAAR,YAAA2B,EAAc,SAAd,YAAAC,EAAsB,MAAM,KAAM/B,EAAQ,QAC7DO,EAAI,OAAO,SAAS0B,GAAAD,EAAA7B,EAAQ,OAAR,YAAA6B,EAAc,SAAd,YAAAC,EAAsB,MAAM,KAAMjC,EAAQ,QAC9DO,EAAI,OAAO,MAAQA,EAAI,OAAO,MAC9BA,EAAI,OAAO,OAASA,EAAI,OAAO,OAC/BA,EAAI,OAAO,MAAM,MAAQ,GACzBA,EAAI,MAAM,MAAM,QAAU,OAC1BA,EAAI,KAAK,MAAM,QAAU,OACzBA,EAAI,OAAO,MAAM,QAAU,OAC3BA,EAAI,MAAM,MAAM,QAAU,QACrBL,EAAM,EAIFsC,EAAW,GAHlB7B,EAAI,yBAAyB,EACtB,GAIX,CAEA,eAAeoC,GAAO,CApQtB,IAAAjB,EAAAC,EAAAC,EAqQErB,EAAI,iBAAkBN,EAAM,QAAS,kBAAmBA,EAAM,GAAG,QAAQ,YAAY,EACrFM,EAAI,yBAAyBmB,EAAAhC,EAAY,KAAK,cAAjB,MAAAgC,EAAiC,QAAU,UAAY,IAAIC,EAAAjC,EAAY,KAAK,gBAAjB,MAAAiC,EAAmC,QAAU,gBAAkB,IAAIC,EAAAlC,EAAY,KAAK,cAAjB,MAAAkC,EAAiC,QAAU,cAAgB,EAAE,EACxNrB,EAAI,WAAY,KAAK,UAAUX,CAAO,EAAE,QAAQ,eAAgB,EAAE,EAAE,QAAQ,KAAM,GAAG,CAAC,EACtFa,EAAS,YAAY,EACrBF,EAAI,sBAAuB,MAAc0B,EAAM,CAAC,EAChD,MAAMvB,EAAO,EACb,MAAMT,EAAM,KAAK,EACjBQ,EAAS,iBAAiB,EAC1BN,EAAI,MAAM,iBAAiB,QAASuC,CAAI,EACxCvC,EAAI,KAAK,iBAAiB,QAASsB,CAAW,EAC9CtB,EAAI,OAAO,iBAAiB,QAAS+B,CAAY,EACjD,MAAMjC,EAAM,OAAO,EACnB,MAAMyC,EAAK,CACb,CAEA,OAAO,OAASC", - "names": ["Human", "db", "database", "table", "log", "msg", "open", "resolve", "request", "evt", "load", "faceDB", "cursor", "count", "store", "save", "faceRecord", "newRecord", "remove", "humanConfig", "matchOptions", "options", "ok", "allOk", "current", "blink", "human", "Human", "dom", "timestamp", "fps", "startTime", "log", "msg", "printFPS", "webCam", "cameraOptions", "stream", "ready", "resolve", "detectionLoop", "now", "validationLoop", "interpolated", "gestures", "gesture", "y", "key", "val", "el", "saveRecords", "_a", "_b", "_c", "_d", "image", "rec", "save", "count", "deleteRecord", "remove", "detectFace", "db", "load", "descriptors", "desc", "res", "main", "init"] + "sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary \n * @author \n * @copyright \n * @license MIT\n */\n\nimport * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human\nimport * as indexDb from './indexdb'; // methods to deal with indexdb\n\nconst humanConfig = { // user configuration for human, used to fine-tune 
behavior\n modelBasePath: '../../models',\n filter: { equalization: true }, // lets run with histogram equilizer\n face: {\n enabled: true,\n detector: { rotation: true, return: true, cropFactor: 1.6, mask: false }, // return tensor is used to get detected face image\n description: { enabled: true }, // default model for face descriptor extraction is faceres\n // mobilefacenet: { enabled: true, modelPath: 'https://vladmandic.github.io/human-models/models/mobilefacenet.json' }, // alternative model\n // insightface: { enabled: true, modelPath: 'https://vladmandic.github.io/insightface/models/insightface-mobilenet-swish.json' }, // alternative model\n iris: { enabled: true }, // needed to determine gaze direction\n emotion: { enabled: false }, // not needed\n antispoof: { enabled: true }, // enable optional antispoof module\n liveness: { enabled: true }, // enable optional liveness module\n },\n body: { enabled: false },\n hand: { enabled: false },\n object: { enabled: false },\n gesture: { enabled: true }, // parses face and iris gestures\n};\n\n// const matchOptions = { order: 2, multiplier: 1000, min: 0.0, max: 1.0 }; // for embedding model\nconst matchOptions = { order: 2, multiplier: 25, min: 0.2, max: 0.8 }; // for faceres model\n\nconst options = {\n minConfidence: 0.6, // overal face confidence for box, face, gender, real, live\n minSize: 224, // min input to face descriptor model before degradation\n maxTime: 10000, // max time before giving up\n blinkMin: 10, // minimum duration of a valid blink\n blinkMax: 800, // maximum duration of a valid blink\n threshold: 0.5, // minimum similarity\n mask: humanConfig.face.detector.mask,\n rotation: humanConfig.face.detector.rotation,\n cropFactor: humanConfig.face.detector.cropFactor,\n ...matchOptions,\n};\n\nconst ok = { // must meet all rules\n faceCount: false,\n faceConfidence: false,\n facingCenter: false,\n lookingCenter: false,\n blinkDetected: false,\n faceSize: false,\n antispoofCheck: false,\n livenessCheck: false,\n elapsedMs: 0, // total time while waiting for valid face\n};\nconst allOk = () => ok.faceCount && ok.faceSize && ok.blinkDetected && ok.facingCenter && ok.lookingCenter && ok.faceConfidence && ok.antispoofCheck && ok.livenessCheck;\nconst current: { face: H.FaceResult | null, record: indexDb.FaceRecord | null } = { face: null, record: null }; // current face record and matched database record\n\nconst blink = { // internal timers for blink start/end/duration\n start: 0,\n end: 0,\n time: 0,\n};\n\n// let db: Array<{ name: string, source: string, embedding: number[] }> = []; // holds loaded face descriptor database\nconst human = new H.Human(humanConfig); // create instance of human with overrides from user configuration\n\nhuman.env.perfadd = false; // is performance data showing instant or total values\nhuman.draw.options.font = 'small-caps 18px \"Lato\"'; // set font used to draw labels when using draw methods\nhuman.draw.options.lineHeight = 20;\n\nconst dom = { // grab instances of dom objects so we dont have to look them up later\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('fps') as HTMLPreElement,\n match: document.getElementById('match') as HTMLDivElement,\n name: document.getElementById('name') as HTMLInputElement,\n save: document.getElementById('save') as HTMLSpanElement,\n delete: document.getElementById('delete') as HTMLSpanElement,\n 
[…remainder of the generated dist sourcemap diff elided: machine-produced `sourcesContent`, `mappings`, and `names` arrays for the faceid demo bundle; the corresponding source changes are reviewed in the demo/faceid diffs below]
diff --git a/demo/faceid/index.ts b/demo/faceid/index.ts
index feb31a1b..0abf6ef1 100644
--- a/demo/faceid/index.ts
+++ b/demo/faceid/index.ts
@@ -7,7 +7,7 @@
  * @license MIT
  */
 
-import { Human, TensorLike, FaceResult } from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human
+import * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human
 import * as indexDb from './indexdb'; // methods to deal with indexdb
 
 const humanConfig = { // user configuration for human, used to fine-tune behavior
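A note on the import change above: switching to a namespace import keeps the class and its result types reachable through a single binding, which is what lets the rest of this file write `H.FaceResult` and `H.GestureResult` without separate named type imports. An illustrative sketch, assuming the published `@vladmandic/human` package resolves to the same exports as the bundled `dist/human.esm.js`:

```ts
// Illustrative use of the namespace-import pattern adopted above; the package
// name is an assumption standing in for the local dist/human.esm.js path.
import * as H from '@vladmandic/human';

const human = new H.Human();          // the class lives on the namespace
let face: H.FaceResult | null = null; // result types ride along on the same import
let gestures: H.GestureResult[] = []; // same for gesture results used later in this demo
console.log(human.version, face, gestures);
```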
@@ -58,7 +58,7 @@ const ok = { // must meet all rules
   elapsedMs: 0, // total time while waiting for valid face
 };
 const allOk = () => ok.faceCount && ok.faceSize && ok.blinkDetected && ok.facingCenter && ok.lookingCenter && ok.faceConfidence && ok.antispoofCheck && ok.livenessCheck;
-const current: { face: FaceResult | null, record: indexDb.FaceRecord | null } = { face: null, record: null }; // current face record and matched database record
+const current: { face: H.FaceResult | null, record: indexDb.FaceRecord | null } = { face: null, record: null }; // current face record and matched database record
 
 const blink = { // internal timers for blink start/end/duration
   start: 0,
@@ -67,9 +67,9 @@
 };
 
 // let db: Array<{ name: string, source: string, embedding: number[] }> = []; // holds loaded face descriptor database
-const human = new Human(humanConfig); // create instance of human with overrides from user configuration
+const human = new H.Human(humanConfig); // create instance of human with overrides from user configuration
 
-human.env['perfadd'] = false; // is performance data showing instant or total values
+human.env.perfadd = false; // whether performance data shows instant or cumulative values
 human.draw.options.font = 'small-caps 18px "Lato"'; // set font used to draw labels when using draw methods
 human.draw.options.lineHeight = 20;
 
@@ -92,8 +92,7 @@
 let startTime = 0;
 
 const log = (...msg) => { // helper method to output messages
   dom.log.innerText += msg.join(' ') + '\n';
-  // eslint-disable-next-line no-console
-  console.log(...msg);
+  console.log(...msg); // eslint-disable-line no-console
 };
 const printFPS = (msg) => dom.fps.innerText = msg; // print status element
 
@@ -126,7 +125,7 @@
   }
 }
 
-async function validationLoop(): Promise<FaceResult> { // main screen refresh loop
+async function validationLoop(): Promise<H.FaceResult> { // main screen refresh loop
   const interpolated = await human.next(human.result); // smooth the result using last-known results
   await human.draw.canvas(dom.video, dom.canvas); // draw canvas to screen
   await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.
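The hunk above shows the demo's two-loop design: detection free-runs as fast as the model allows, while the validation loop repaints at display rate from an interpolated copy of the last known result (`human.next`). A reduced sketch of that shape, with the `Human` surface trimmed to just the members this sketch touches:

```ts
// Reduced sketch of the split detect/draw loops; HumanLike is a stand-in
// interface, not the library's real type surface.
interface HumanLike {
  result: unknown;
  detect(input: HTMLVideoElement): Promise<unknown>;
  next(result: unknown): Promise<unknown>;
  draw: { canvas(src: HTMLVideoElement, dst: HTMLCanvasElement): Promise<void>; all(dst: HTMLCanvasElement, result: unknown): Promise<void> };
}

async function detectionLoop(human: HumanLike, video: HTMLVideoElement): Promise<void> {
  if (video.paused) return;                                 // pause/resume is driven by canvas clicks
  await human.detect(video);                                // newest result lands on human.result
  requestAnimationFrame(() => detectionLoop(human, video)); // immediately queue the next detection
}

async function drawLoop(human: HumanLike, video: HTMLVideoElement, canvas: HTMLCanvasElement): Promise<void> {
  const interpolated = await human.next(human.result);      // smoothed snapshot between real detections
  await human.draw.canvas(video, canvas);                   // mirror the video frame
  await human.draw.all(canvas, interpolated);               // overlay boxes, labels, gestures
  requestAnimationFrame(() => drawLoop(human, video, canvas)); // refresh at display rate
}
```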
@@ -136,7 +135,7 @@ async function validationLoop(): Promise<FaceResult> { // main screen refresh lo
   printFPS(`fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect | ${fps.draw.toFixed(1).padStart(5, ' ')} draw`); // write status
   ok.faceCount = human.result.face.length === 1; // must be exactly one detected face
   if (ok.faceCount) { // skip the rest if no face
-    const gestures: string[] = Object.values(human.result.gesture).map((gesture) => gesture.gesture); // flatten all gestures
+    const gestures: string[] = Object.values(human.result.gesture).map((gesture) => (gesture as H.GestureResult).gesture); // flatten all gestures
     if (gestures.includes('blink left eye') || gestures.includes('blink right eye')) blink.start = human.now(); // blink starts when eyes get closed
     if (blink.start > 0 && !gestures.includes('blink left eye') && !gestures.includes('blink right eye')) blink.end = human.now(); // if a blink started, measure how long until eyes are back open
     ok.blinkDetected = ok.blinkDetected || (Math.abs(blink.end - blink.start) > options.blinkMin && Math.abs(blink.end - blink.start) < options.blinkMax);
@@ -169,15 +168,15 @@ async function validationLoop(): Promise<FaceResult> { // main screen refresh lo
   if (ok.elapsedMs > options.maxTime) { // give up
     dom.video.pause();
     return human.result.face[0];
-  } else { // run again
-    ok.elapsedMs = Math.trunc(human.now() - startTime);
-    return new Promise((resolve) => {
-      setTimeout(async () => {
-        const res = await validationLoop(); // run validation loop until conditions are met
-        if (res) resolve(human.result.face[0]); // recursive promise resolve
-      }, 30); // use to slow down refresh from max refresh rate to target of 30 fps
-    });
   }
+  // run again
+  ok.elapsedMs = Math.trunc(human.now() - startTime);
+  return new Promise((resolve) => {
+    setTimeout(async () => {
+      const res = await validationLoop(); // run validation loop until conditions are met
+      if (res) resolve(human.result.face[0]); // recursive promise resolve
+    }, 30); // use to slow down refresh from max refresh rate to target of 30 fps
+  });
 }
 
 async function saveRecords() {
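The two hunks above carry the demo's liveness reasoning: a blink only counts when the eyes-closed interval falls inside a plausible window, and the loop re-arms itself on a ~30 ms timer rather than free-running. A condensed sketch of the blink-window test, with hypothetical thresholds standing in for `options.blinkMin`/`options.blinkMax` (not the demo's actual defaults):

```ts
// Condensed sketch of the blink-window check; blinkMin/blinkMax values here
// are hypothetical placeholders, in milliseconds.
interface BlinkTimers { start: number; end: number; time: number }

function blinkDetected(blink: BlinkTimers, blinkMin = 10, blinkMax = 800): boolean {
  const duration = Math.abs(blink.end - blink.start);    // how long the eyes stayed closed
  const ok = duration > blinkMin && duration < blinkMax; // too short = jitter; too long = closed eyes or a photo
  if (ok && blink.time === 0) blink.time = Math.trunc(duration); // record the first detected blink
  return ok;
}

// usage: start/end are stamped by the gesture checks in the loop above
const timers: BlinkTimers = { start: 1000, end: 1180, time: 0 };
console.log(blinkDetected(timers)); // true: 180 ms falls inside the window
```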
@@ -201,9 +200,8 @@
 async function detectFace() {
   dom.canvas.getContext('2d')?.clearRect(0, 0, options.minSize, options.minSize);
   if (!current.face || !current.face.tensor || !current.face.embedding) return false;
-  // eslint-disable-next-line no-console
-  console.log('face record:', current.face);
-  human.tf.browser.toPixels(current.face.tensor as unknown as TensorLike, dom.canvas);
+  console.log('face record:', current.face); // eslint-disable-line no-console
+  human.tf.browser.toPixels(current.face.tensor as unknown as H.TensorLike, dom.canvas);
   if (await indexDb.count() === 0) {
     log('face database is empty');
     document.body.style.background = 'black';
@@ -241,8 +239,8 @@ async function main() { // main entry point
   await detectionLoop(); // start detection loop
   startTime = human.now();
   current.face = await validationLoop(); // start validation loop
-  dom.canvas.width = current.face?.tensor?.shape[1] || options.minSize;
-  dom.canvas.height = current.face?.tensor?.shape[0] || options.minSize;
+  dom.canvas.width = current.face.tensor?.shape[1] || options.minSize;
+  dom.canvas.height = current.face.tensor?.shape[0] || options.minSize;
   dom.source.width = dom.canvas.width;
   dom.source.height = dom.canvas.height;
   dom.canvas.style.width = '';
@@ -253,14 +251,13 @@ async function main() { // main entry point
   if (!allOk()) { // are all criteria met?
     log('did not find valid face');
     return false;
-  } else {
-    return detectFace();
   }
+  return detectFace();
 }
 
 async function init() {
   log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);
-  log('face embedding model:', humanConfig.face['description']?.enabled ? 'faceres' : '', humanConfig.face['mobilefacenet']?.enabled ? 'mobilefacenet' : '', humanConfig.face['insightface']?.enabled ? 'insightface' : '');
+  log('face embedding model:', humanConfig.face.description.enabled ? 'faceres' : '', humanConfig.face['mobilefacenet']?.enabled ? 'mobilefacenet' : '', humanConfig.face['insightface']?.enabled ? 'insightface' : '');
   log('options:', JSON.stringify(options).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ' '));
   printFPS('loading...');
   log('known face records:', await indexDb.count());
diff --git a/demo/faceid/indexdb.ts b/demo/faceid/indexdb.ts
index 77fee852..af324778 100644
--- a/demo/faceid/indexdb.ts
+++ b/demo/faceid/indexdb.ts
@@ -3,10 +3,9 @@ let db: IDBDatabase; // instance of indexdb
 const database = 'human';
 const table = 'person';
 
-export type FaceRecord = { id: number, name: string, descriptor: number[], image: ImageData };
+export interface FaceRecord { id: number, name: string, descriptor: number[], image: ImageData }
 
-// eslint-disable-next-line no-console
-const log = (...msg) => console.log('indexdb', ...msg);
+const log = (...msg) => console.log('indexdb', ...msg); // eslint-disable-line no-console
 
 export async function open() {
   if (db) return true;
@@ -19,7 +18,7 @@ export async function open() {
       db.createObjectStore(table, { keyPath: 'id', autoIncrement: true });
     };
     request.onsuccess = (evt) => { // open
-      db = (evt.target as IDBOpenDBRequest).result as IDBDatabase;
+      db = (evt.target as IDBOpenDBRequest).result;
       log('open:', db);
       resolve(true);
     };
@@ -27,7 +26,7 @@ export async function open() {
 }
 
 export async function load(): Promise<FaceRecord[]> {
-  const faceDB: Array<FaceRecord> = [];
+  const faceDB: FaceRecord[] = [];
   if (!db) await open(); // open or create if not already done
   return new Promise((resolve) => {
     const cursor: IDBRequest = db.transaction([table], 'readwrite').objectStore(table).openCursor(null, 'next');
diff --git a/demo/facematch/facematch.js b/demo/facematch/facematch.js
index d7d3ca5e..ceac0522 100644
--- a/demo/facematch/facematch.js
+++ b/demo/facematch/facematch.js
@@ -1,4 +1,3 @@
-// @ts-nocheck
 /**
  * Human demo for browsers
  *
@@ -6,7 +5,7 @@
  */
 
 /** @type {Human} */
-import Human from '../../dist/human.esm.js';
+import { Human } from '../../dist/human.esm.js';
 
 const userConfig = {
   backend: 'humangl',
@@ -46,8 +45,7 @@ const minScore = 0.4;
 function log(...msg) {
   const dt = new Date();
   const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;
-  // eslint-disable-next-line no-console
-  console.log(ts, ...msg);
+  console.log(ts, ...msg); // eslint-disable-line no-console
 }
 
 function title(msg) {
@@ -181,10 +179,12 @@ async function AddImageElement(index, image, length) {
     const img = new Image(128, 128);
     img.onload = () => { // must wait until image is loaded
       document.getElementById('images').appendChild(img); // and finally we can add it
-      human.detect(img, userConfig).then((res) => {
-        AddFaceCanvas(index, res, image); // then wait until image is analyzed
-        resolve(true);
-      });
+      human.detect(img, userConfig)
+        .then((res) => { // eslint-disable-line promise/always-return
+          AddFaceCanvas(index, res,
image); // then wait until image is analyzed + resolve(true); + }) + .catch(() => log('human detect error')); }; img.onerror = () => { log('Add image error:', index + 1, image); diff --git a/demo/facematch/index.html b/demo/facematch/index.html index 8d4a8c96..3ebf3b9e 100644 --- a/demo/facematch/index.html +++ b/demo/facematch/index.html @@ -45,6 +45,6 @@
Select person to sort by similarity and get a known face match
-
+
diff --git a/demo/facematch/node-match-worker.js b/demo/facematch/node-match-worker.js
index e8556db5..14e648f3 100644
--- a/demo/facematch/node-match-worker.js
+++ b/demo/facematch/node-match-worker.js
@@ -52,7 +52,7 @@ threads.parentPort?.on('message', (msg) => {
   if (msg instanceof SharedArrayBuffer) { // called only once to receive reference to shared array buffer
     buffer = msg;
     view = new Float32Array(buffer); // initialize f32 view into buffer
-    if (debug) threads.parentPort?.postMessage(`buffer: ${buffer?.byteLength}`);
+    if (debug) threads.parentPort?.postMessage(`buffer: ${buffer.byteLength}`);
   }
   if (typeof msg.records !== 'undefined') { // received every time the number of records changes
     records = msg.records;
@@ -68,7 +68,7 @@
   }
   if (typeof msg.shutdown !== 'undefined') { // got message to close worker
     if (debug) threads.parentPort?.postMessage('shutting down');
-    process.exit(0);
+    process.exit(0); // eslint-disable-line no-process-exit
   }
 });
diff --git a/demo/facematch/node-match.js b/demo/facematch/node-match.js
index 16e7a5cd..6e268f82 100644
--- a/demo/facematch/node-match.js
+++ b/demo/facematch/node-match.js
@@ -6,8 +6,8 @@
 const fs = require('fs');
 const path = require('path');
-const log = require('@vladmandic/pilogger');
 const threads = require('worker_threads');
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require
 
 // global options
 const options = {
@@ -160,7 +160,7 @@ async function createBuffer() {
   data.buffer = new SharedArrayBuffer(4 * options.dbMax * options.descLength); // preallocate max number of records as SharedArrayBuffers cannot grow
   data.view = new Float32Array(data.buffer); // create view into buffer
   data.labels.length = 0;
-  log.data('created shared buffer:', { maxDescriptors: (data.view?.length || 0) / options.descLength, totalBytes: data.buffer.byteLength, totalElements: data.view?.length });
+  log.data('created shared buffer:', { maxDescriptors: (data.view.length || 0) / options.descLength, totalBytes: data.buffer.byteLength, totalElements: data.view.length });
 }
 
 async function main() {
diff --git a/demo/helpers/gl-bench.js b/demo/helpers/gl-bench.js
index 223d3f14..6a5fa0ab 100644
--- a/demo/helpers/gl-bench.js
+++ b/demo/helpers/gl-bench.js
@@ -1,4 +1,3 @@
-// @ts-nocheck
 // based on: https://github.com/munrocket/gl-bench
 
 const UICSS = `
@@ -43,9 +42,7 @@ class GLBench {
   constructor(gl, settings = {}) {
     this.css = UICSS;
     this.svg = UISVG;
-    // eslint-disable-next-line @typescript-eslint/no-empty-function
     this.paramLogger = () => {};
-    // eslint-disable-next-line @typescript-eslint/no-empty-function
     this.chartLogger = () => {};
     this.chartLen = 20;
     this.chartHz = 20;
@@ -92,7 +89,6 @@
     const addProfiler = (fn, self, target) => {
       const t = self.now();
-      // eslint-disable-next-line prefer-rest-params
       fn.apply(target, arguments);
       if (self.trackGPU) self.finished.push(glFinish(t, self.activeAccums.slice(0)));
     };
@@ -107,13 +103,11 @@
       if (gl[fn]) {
         gl[fn] = addProfiler(gl[fn], this, gl);
       } else {
-        // eslint-disable-next-line no-console
         console.log('bench: cannot attach to webgl function');
       }
 
       /*
       gl.getExtension = ((fn, self) => {
-        // eslint-disable-next-line prefer-rest-params
        const ext = fn.apply(gl, arguments);
        if (ext) {
          ['drawElementsInstancedANGLE', 'drawBuffersWEBGL'].forEach((fn2) => {
@@ -148,7 +142,6 @@
     return (i, cpu, gpu, mem, fps, totalTime, frameId) => {
       nodes['gl-cpu'][i].style.strokeDasharray = (cpu * 
0.27).toFixed(0) + ' 100'; nodes['gl-gpu'][i].style.strokeDasharray = (gpu * 0.27).toFixed(0) + ' 100'; - // eslint-disable-next-line no-nested-ternary nodes['gl-mem'][i].innerHTML = names[i] ? names[i] : (mem ? 'mem: ' + mem.toFixed(0) + 'mb' : ''); nodes['gl-fps'][i].innerHTML = 'FPS: ' + fps.toFixed(1); logger(names[i], cpu, gpu, mem, fps, totalTime, frameId); diff --git a/demo/helpers/jsonview.js b/demo/helpers/jsonview.js index 08dc1d9c..8d970b87 100644 --- a/demo/helpers/jsonview.js +++ b/demo/helpers/jsonview.js @@ -64,9 +64,7 @@ function createNode() { hideChildren() { if (Array.isArray(this.children)) { this.children.forEach((item) => { - // @ts-ignore item['elem']['classList'].add('hide'); - // @ts-ignore if (item['expanded']) item.hideChildren(); }); } @@ -74,9 +72,7 @@ function createNode() { showChildren() { if (Array.isArray(this.children)) { this.children.forEach((item) => { - // @ts-ignore item['elem']['classList'].remove('hide'); - // @ts-ignore if (item['expanded']) item.showChildren(); }); } diff --git a/demo/helpers/menu.js b/demo/helpers/menu.js index 6406a1c5..a3145a54 100644 --- a/demo/helpers/menu.js +++ b/demo/helpers/menu.js @@ -119,7 +119,6 @@ class Menu { this.menu.appendChild(this.container); if (typeof parent === 'object') parent.appendChild(this.menu); - // @ts-ignore undefined else document.getElementById(parent).appendChild(this.menu); } @@ -184,7 +183,6 @@ class Menu { this.hidden = !this.hidden; const all = document.getElementsByClassName('menu'); for (const item of all) { - // @ts-ignore item.style.display = this.hidden ? 'none' : 'block'; } }); @@ -241,7 +239,6 @@ class Menu { el.addEventListener('change', (evt) => { if (evt.target) { object[variable] = parseInt(evt.target['value']) === parseFloat(evt.target['value']) ? 
parseInt(evt.target['value']) : parseFloat(evt.target['value']); - // @ts-ignore evt.target.setAttribute('value', evt.target['value']); if (callback) callback(evt.target['value']); } @@ -286,7 +283,6 @@ class Menu { return el; } - // eslint-disable-next-line class-methods-use-this updateValue(title, val, suffix = '') { const el = document.getElementById(`menu-val-${title}`); if (el) el.innerText = `${title}: ${val}${suffix}`; @@ -303,11 +299,9 @@ class Menu { return el; } - // eslint-disable-next-line class-methods-use-this async updateChart(id, values) { if (!values || (values.length === 0)) return; /** @type {HTMLCanvasElement} */ - // @ts-ignore undefined const canvas = document.getElementById(`menu-canvas-${id}`); if (!canvas) return; const ctx = canvas.getContext('2d'); diff --git a/demo/helpers/webrtc.js b/demo/helpers/webrtc.js index a1b10053..6f083e71 100644 --- a/demo/helpers/webrtc.js +++ b/demo/helpers/webrtc.js @@ -4,8 +4,7 @@ async function log(...msg) { if (debug) { const dt = new Date(); const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`; - // eslint-disable-next-line no-console - console.log(ts, 'webrtc', ...msg); + console.log(ts, 'webrtc', ...msg); // eslint-disable-line no-console } } diff --git a/demo/index-pwa.js b/demo/index-pwa.js index 6f20f58d..781567ad 100644 --- a/demo/index-pwa.js +++ b/demo/index-pwa.js @@ -2,6 +2,7 @@ * PWA Service Worker for Human main demo */ +/* eslint-disable no-restricted-globals */ /// const skipCaching = false; @@ -19,8 +20,7 @@ const stats = { hit: 0, miss: 0 }; const log = (...msg) => { const dt = new Date(); const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`; - // eslint-disable-next-line no-console - console.log(ts, 'pwa', ...msg); + console.log(ts, 'pwa', ...msg); // eslint-disable-line no-console }; async function updateCached(req) { @@ -28,7 +28,7 @@ async function updateCached(req) { .then((update) => { // update cache if request is ok if (update.ok) { - caches + caches // eslint-disable-line promise/no-nesting .open(cacheName) .then((cache) => cache.put(req, update)) .catch((err) => log('cache update error', err)); @@ -75,14 +75,13 @@ async function getCached(evt) { } function cacheInit() { - // eslint-disable-next-line promise/catch-or-return caches.open(cacheName) - // eslint-disable-next-line promise/no-nesting - .then((cache) => cache.addAll(cacheFiles) + .then((cache) => cache.addAll(cacheFiles) // eslint-disable-line promise/no-nesting .then( () => log('cache refresh:', cacheFiles.length, 'files'), (err) => log('cache error', err), - )); + )) + .catch(() => log('cache error')); } if (!listening) { @@ -99,14 +98,12 @@ if (!listening) { self.addEventListener('install', (evt) => { log('install'); - // @ts-ignore scope for self is ServiceWorkerGlobalScope not Window self.skipWaiting(); evt.waitUntil(cacheInit); }); self.addEventListener('activate', (evt) => { log('activate'); - // @ts-ignore scope for self is ServiceWorkerGlobalScope not Window evt.waitUntil(self.clients.claim()); }); @@ -114,7 +111,7 @@ if (!listening) { const uri = new URL(evt.request.url); // if (uri.pathname === '/') { log('cache skip /', evt.request); return; } // skip root access requests if (evt.request.cache === 'only-if-cached' && evt.request.mode 
!== 'same-origin') return; // required due to chrome bug - if (uri.origin !== location.origin) return; // skip non-local requests + if (uri.origin !== self.location.origin) return; // skip non-local requests if (evt.request.method !== 'GET') return; // only cache get requests if (evt.request.url.includes('/api/')) return; // don't cache api requests, failures are handled at the time of call @@ -129,7 +126,7 @@ if (!listening) { log(`PWA: ${evt.type}`); if (refreshed) return; refreshed = true; - location.reload(); + self.location.reload(); }); listening = true; diff --git a/demo/index-worker.js b/demo/index-worker.js index ee729421..a44eb85b 100644 --- a/demo/index-worker.js +++ b/demo/index-worker.js @@ -6,10 +6,9 @@ /// // load Human using IIFE script as Chome Mobile does not support Modules as Workers -self.importScripts('../dist/human.js'); +self.importScripts('../dist/human.js'); // eslint-disable-line no-restricted-globals let busy = false; -// @ts-ignore // eslint-disable-next-line new-cap, no-undef const human = new Human.default(); diff --git a/demo/index.html b/demo/index.html index 1cf6f931..f1af65a9 100644 --- a/demo/index.html +++ b/demo/index.html @@ -89,9 +89,9 @@
-
-
-
+
+
+
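For reference between the HTML changes above and the demo/index.js changes below: the worker demos all share one handshake, in which the main thread transfers the frame's pixel buffer to the worker instead of copying it, and the worker reconstructs an `ImageData` from it. A minimal sketch of the main-thread side, with the message shape mirroring the comment preserved in demo/multithread/worker.js further below (the worker path here is illustrative):

```ts
// Minimal sketch of the transfer-based worker handshake used by the demos;
// 'worker.js' is a placeholder path.
const canvas = document.createElement('canvas');
canvas.width = 640;
canvas.height = 480;
const ctx = canvas.getContext('2d') as CanvasRenderingContext2D;
const image = ctx.getImageData(0, 0, canvas.width, canvas.height);

const worker = new Worker('worker.js'); // classic worker: Chrome Mobile cannot load module workers
worker.onmessage = (msg) => console.log('detect result:', msg.data);
// the second argument transfers the pixel buffer to the worker instead of copying it
worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config: {} }, [image.data.buffer]);
```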
diff --git a/demo/index.js b/demo/index.js index d62efd4d..d8797eac 100644 --- a/demo/index.js +++ b/demo/index.js @@ -20,9 +20,7 @@ // test url -// @ts-nocheck // typescript checks disabled as this is pure javascript - -import Human from '../dist/human.esm.js'; // equivalent of @vladmandic/human +import { Human } from '../dist/human.esm.js'; // equivalent of @vladmandic/human import Menu from './helpers/menu.js'; import GLBench from './helpers/gl-bench.js'; import webRTC from './helpers/webrtc.js'; @@ -153,7 +151,7 @@ let bench; let lastDetectedResult = {}; // helper function: async pause -// eslint-disable-next-line @typescript-eslint/no-unused-vars, no-unused-vars +// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars const delay = (ms) => new Promise((resolve) => { setTimeout(resolve, ms); }); // helper function: translates json to human readable string @@ -171,8 +169,7 @@ function str(...msg) { function log(...msg) { const dt = new Date(); const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`; - // eslint-disable-next-line no-console - if (ui.console) console.log(ts, ...msg); + if (ui.console) console.log(ts, ...msg); // eslint-disable-line no-console } let prevStatus = ''; @@ -349,12 +346,10 @@ async function drawResults(input) { videoPause(); ui.drawThread = null; } - } else { - if (ui.drawThread) { - log('stopping buffered refresh'); - cancelAnimationFrame(ui.drawThread); - ui.drawThread = null; - } + } else if (ui.drawThread) { + log('stopping buffered refresh'); + cancelAnimationFrame(ui.drawThread); + ui.drawThread = null; } } @@ -445,8 +440,7 @@ async function setupCamera() { ui.menuWidth.input.setAttribute('value', video.videoWidth); ui.menuHeight.input.setAttribute('value', video.videoHeight); if (live || ui.autoPlay) await videoPlay(); - // eslint-disable-next-line no-use-before-define - if ((live || ui.autoPlay) && !ui.detectThread) runHumanDetect(video, canvas); + if ((live || ui.autoPlay) && !ui.detectThread) runHumanDetect(video, canvas); // eslint-disable-line no-use-before-define return 'camera stream ready'; } @@ -500,8 +494,7 @@ function webWorker(input, image, canvas, timestamp) { ui.framesDetect++; if (!ui.drawThread) drawResults(input); if (isLive(input)) { - // eslint-disable-next-line no-use-before-define - ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now)); + ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now)); // eslint-disable-line no-use-before-define } }); } @@ -538,36 +531,39 @@ function runHumanDetect(input, canvas, timestamp) { // perform detection in worker webWorker(input, data, canvas, timestamp); } else { - human.detect(input, userConfig).then((result) => { - status(); - /* - setTimeout(async () => { // simulate gl context lost 2sec after initial detection - const ext = human.gl && human.gl.gl ? human.gl.gl.getExtension('WEBGL_lose_context') : {}; - if (ext && ext.loseContext) { - log('simulate context lost:', human.env.webgl, human.gl, ext); - human.gl.gl.getExtension('WEBGL_lose_context').loseContext(); - await videoPause(); - status('Exception: WebGL'); + human.detect(input, userConfig) + .then((result) => { + status(); + /* + setTimeout(async () => { // simulate gl context lost 2sec after initial detection + const ext = human.gl && human.gl.gl ? 
human.gl.gl.getExtension('WEBGL_lose_context') : {}; + if (ext && ext.loseContext) { + log('simulate context lost:', human.env.webgl, human.gl, ext); + human.gl.gl.getExtension('WEBGL_lose_context').loseContext(); + await videoPause(); + status('Exception: WebGL'); + } + }, 2000); + */ + if (result.performance && result.performance.total) ui.detectFPS.push(1000 / result.performance.total); + if (ui.detectFPS.length > ui.maxFPSframes) ui.detectFPS.shift(); + if (ui.bench) { + if (!bench) initPerfMonitor(); + bench.nextFrame(timestamp); } - }, 2000); - */ - if (result.performance && result.performance.total) ui.detectFPS.push(1000 / result.performance.total); - if (ui.detectFPS.length > ui.maxFPSframes) ui.detectFPS.shift(); - if (ui.bench) { - if (!bench) initPerfMonitor(); - bench.nextFrame(timestamp); - } - if (document.getElementById('gl-bench')) document.getElementById('gl-bench').style.display = ui.bench ? 'block' : 'none'; - if (result.error) { - log(result.error); - document.getElementById('log').innerText += `\nHuman error: ${result.error}`; - } else { - lastDetectedResult = result; - if (!ui.drawThread) drawResults(input); - ui.framesDetect++; - ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now)); - } - }); + if (document.getElementById('gl-bench')) document.getElementById('gl-bench').style.display = ui.bench ? 'block' : 'none'; + if (result.error) { + log(result.error); + document.getElementById('log').innerText += `\nHuman error: ${result.error}`; + } else { + lastDetectedResult = result; + if (!ui.drawThread) drawResults(input); + ui.framesDetect++; + ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now)); + } + return result; + }) + .catch(() => log('human detect error')); } } @@ -614,8 +610,7 @@ async function processImage(input, title) { // copy to clipboard on click if (typeof ClipboardItem !== 'undefined' && navigator.clipboard) { evt.target.toBlob((blob) => { - // eslint-disable-next-line no-undef - const item = new ClipboardItem({ 'image/png': blob }); + const item = new ClipboardItem({ 'image/png': blob }); // eslint-disable-line no-undef navigator.clipboard.write([item]); log('copied image to clipboard'); }); @@ -938,10 +933,10 @@ async function pwaRegister() { const regs = await navigator.serviceWorker.getRegistrations(); for (const reg of regs) { log('pwa found:', reg.scope); - if (reg.scope.startsWith(location.origin)) found = reg; + if (reg.scope.startsWith(window.location.origin)) found = reg; } if (!found) { - const reg = await navigator.serviceWorker.register(pwa.scriptFile, { scope: location.pathname }); + const reg = await navigator.serviceWorker.register(pwa.scriptFile, { scope: window.location.pathname }); found = reg; log('pwa registered:', reg.scope); } @@ -973,8 +968,7 @@ async function main() { if (ui.detectThread) cancelAnimationFrame(ui.detectThread); if (ui.drawThread) cancelAnimationFrame(ui.drawThread); const msg = evt.reason.message || evt.reason || evt; - // eslint-disable-next-line no-console - console.error(msg); + console.error(msg); // eslint-disable-line no-console document.getElementById('log').innerHTML = msg; status(`exception: ${msg}`); evt.preventDefault(); @@ -997,7 +991,7 @@ async function main() { await pwaRegister(); // parse url search params - const params = new URLSearchParams(location.search); + const params = new URLSearchParams(window.location.search); log('url options:', params.toString()); if (params.has('worker')) { ui.useWorker = JSON.parse(params.get('worker')); 
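The restructure above replaces a bare `.then()` callback with a chain that returns a value and catches rejections, which is what `plugin:promise/recommended` asks for. A stripped-down sketch of the same loop shape; `detect` and `draw` below are placeholder stand-ins for the demo's `human.detect` and `drawResults`:

```ts
// Stripped-down shape of the detection loop after the restructure above;
// detect/draw are declared stand-ins, not the demo's real functions.
declare function detect(input: HTMLVideoElement): Promise<{ error?: string }>;
declare function draw(result: { error?: string }): void;

function runLoop(input: HTMLVideoElement): void {
  detect(input)
    .then((result) => {
      if (result.error) throw new Error(result.error);  // surface detection failures to the catch
      draw(result);
      requestAnimationFrame(() => runLoop(input));      // schedule the next frame only on success
      return result;                                    // satisfies promise/always-return
    })
    .catch((err) => console.error('human detect error', err)); // satisfies promise/catch-or-return
}
```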
@@ -1040,10 +1034,8 @@ async function main() { // we've merged human defaults with user config and now lets store it back so it can be accessed by methods such as menu userConfig = human.config; if (typeof tf !== 'undefined') { - // eslint-disable-next-line no-undef - log('TensorFlow external version:', tf.version); - // eslint-disable-next-line no-undef - human.tf = tf; // use externally loaded version of tfjs + log('TensorFlow external version:', tf.version); // eslint-disable-line no-undef + human.tf = tf; // eslint-disable-line no-undef } log('tfjs version:', human.tf.version.tfjs); diff --git a/demo/multithread/index.js b/demo/multithread/index.js index cb282622..729dbcc0 100644 --- a/demo/multithread/index.js +++ b/demo/multithread/index.js @@ -5,7 +5,7 @@ * */ -import Human from '../../dist/human.esm.js'; // equivalent of @vladmandic/human +import { Human } from '../../dist/human.esm.js'; // equivalent of @vladmandic/human import GLBench from '../helpers/gl-bench.js'; const workerJS = './worker.js'; @@ -130,8 +130,7 @@ const result = { // initialize empty result object which will be partially fille function log(...msg) { const dt = new Date(); const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`; - // eslint-disable-next-line no-console - console.log(ts, ...msg); + console.log(ts, ...msg); // eslint-disable-line no-console } async function drawResults() { @@ -203,7 +202,9 @@ async function setupCamera() { }, }; // enumerate devices for diag purposes - navigator.mediaDevices.enumerateDevices().then((devices) => log('enumerated devices:', devices)); + navigator.mediaDevices.enumerateDevices() + .then((devices) => log('enumerated devices:', devices)) + .catch(() => log('mediaDevices error')); log('camera constraints', constraints); try { stream = await navigator.mediaDevices.getUserMedia(constraints); @@ -230,7 +231,7 @@ async function setupCamera() { }; }); // attach input to video element - if (stream && video) video['srcObject'] = stream; + if (stream && video) video.srcObject = stream; return promise; } diff --git a/demo/multithread/node-multiprocess-worker.js b/demo/multithread/node-multiprocess-worker.js index 8c6cc3d8..a1e12848 100644 --- a/demo/multithread/node-multiprocess-worker.js +++ b/demo/multithread/node-multiprocess-worker.js @@ -6,11 +6,10 @@ */ const fs = require('fs'); -const log = require('@vladmandic/pilogger'); +const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require // workers actual import tfjs and human modules -// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars -const tf = require('@tensorflow/tfjs-node'); +const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require const Human = require('../../dist/human.node.js').default; // or const Human = require('../dist/human.node-gpu.js').default; let human = null; @@ -64,11 +63,9 @@ async function main() { // on worker start first initialize message handler so we don't miss any messages process.on('message', (msg) => { - // @ts-ignore - if (msg.exit && process.exit) process.exit(); // if main told worker to exit - // @ts-ignore + // if main told worker to exit + if (msg.exit && process.exit) process.exit(); // eslint-disable-line no-process-exit if (msg.test && process.send) process.send({ test: true }); - // @ts-ignore if (msg.image) detect(msg.image); // if main told 
worker to process image log.data('Worker received message:', process.pid, msg); // generic log }); diff --git a/demo/multithread/node-multiprocess.js b/demo/multithread/node-multiprocess.js index fb01aec8..89096fb7 100644 --- a/demo/multithread/node-multiprocess.js +++ b/demo/multithread/node-multiprocess.js @@ -8,9 +8,8 @@ const fs = require('fs'); const path = require('path'); -// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require -const log = require('@vladmandic/pilogger'); // this is my simple logger with few extra features -const child_process = require('child_process'); +const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require +const childProcess = require('child_process'); // eslint-disable-line camelcase // note that main process does not import human or tfjs at all, it's all done from worker process const workerFile = 'demo/multithread/node-multiprocess-worker.js'; @@ -74,7 +73,7 @@ async function main() { // manage worker processes for (let i = 0; i < numWorkers; i++) { // create worker process - workers[i] = await child_process.fork(workerFile, ['special']); + workers[i] = await childProcess.fork(workerFile, ['special']); // parse message that worker process sends back to main // if message is ready, dispatch next image in queue // if message is processing result, just print how many faces were detected diff --git a/demo/multithread/worker.js b/demo/multithread/worker.js index 79ddf65c..ec519a3d 100644 --- a/demo/multithread/worker.js +++ b/demo/multithread/worker.js @@ -1,7 +1,7 @@ /// // load Human using IIFE script as Chome Mobile does not support Modules as Workers -self.importScripts('../../dist/human.js'); +self.importScripts('../../dist/human.js'); // eslint-disable-line no-restricted-globals let human; @@ -9,9 +9,8 @@ onmessage = async (msg) => { // received from index.js using: // worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]); - // @ts-ignore // Human is registered as global namespace using IIFE script - // eslint-disable-next-line no-undef, new-cap - if (!human) human = new Human.default(msg.data.config); + // Human is registered as global namespace using IIFE script + if (!human) human = new Human.default(msg.data.config); // eslint-disable-line no-undef, new-cap const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height); let result = {}; result = await human.detect(image, msg.data.config); diff --git a/demo/nodejs/node-canvas.js b/demo/nodejs/node-canvas.js index 832a7fc9..bda13173 100644 --- a/demo/nodejs/node-canvas.js +++ b/demo/nodejs/node-canvas.js @@ -6,9 +6,10 @@ const fs = require('fs'); const process = require('process'); -const log = require('@vladmandic/pilogger'); -const canvas = require('canvas'); // eslint-disable-line node/no-extraneous-require, node/no-missing-require -const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human +const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require +// in nodejs environments tfjs-node is required to be loaded before human +const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require +const canvas = require('canvas'); // eslint-disable-line node/no-unpublished-require // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases) const Human 
= require('../../dist/human.node.js'); // use this when using human in dev mode @@ -63,7 +64,7 @@ async function main() { const face = persons[i].face; const faceTxt = face ? `score:${face.score} age:${face.age} gender:${face.gender} iris:${face.iris}` : null; const body = persons[i].body; - const bodyTxt = body ? `score:${body.score} keypoints:${body.keypoints?.length}` : null; + const bodyTxt = body ? `score:${body.score} keypoints:${body.keypoints.length}` : null; log.data(`Detected: #${i}: Face:${faceTxt} Body:${bodyTxt} LeftHand:${persons[i].hands.left ? 'yes' : 'no'} RightHand:${persons[i].hands.right ? 'yes' : 'no'} Gestures:${persons[i].gestures.length}`); } @@ -71,7 +72,6 @@ async function main() { const outputCanvas = new canvas.Canvas(inputImage.width, inputImage.height); // create canvas const outputCtx = outputCanvas.getContext('2d'); outputCtx.drawImage(result.canvas || inputImage, 0, 0); // draw input image onto canvas - // @ts-ignore canvas is not checked for typedefs human.draw.all(outputCanvas, result); // use human build-in method to draw results as overlays on canvas const outFile = fs.createWriteStream(output); // write canvas to new image file outFile.on('finish', () => log.state('Output image:', output, outputCanvas.width, outputCanvas.height)); diff --git a/demo/nodejs/node-event.js b/demo/nodejs/node-event.js index a5912849..14ca72d2 100644 --- a/demo/nodejs/node-event.js +++ b/demo/nodejs/node-event.js @@ -2,13 +2,14 @@ * Human demo for NodeJS */ -const log = require('@vladmandic/pilogger'); const fs = require('fs'); const process = require('process'); let fetch; // fetch is dynamically imported later -const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human +const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require +// in nodejs environments tfjs-node is required to be loaded before human +const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases) const Human = require('../../dist/human.node.js'); // use this when using human in dev mode @@ -37,7 +38,7 @@ async function detect(input) { let buffer; log.info('Loading image:', input); if (input.startsWith('http:') || input.startsWith('https:')) { - fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-extraneous-require, node/no-missing-import + fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-unpublished-import const res = await fetch(input); if (res && res.ok) buffer = await res.buffer(); else log.error('Invalid image URL:', input, res.status, res.statusText, res.headers.get('content-type')); diff --git a/demo/nodejs/node-fetch.js b/demo/nodejs/node-fetch.js index aefe04c0..af3906ee 100644 --- a/demo/nodejs/node-fetch.js +++ b/demo/nodejs/node-fetch.js @@ -4,10 +4,10 @@ * Requires [node-fetch](https://www.npmjs.com/package/node-fetch) to provide `fetch` functionality in NodeJS environment */ const fs = require('fs'); -const log = require('@vladmandic/pilogger'); +const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require -// eslint-disable-next-line import/no-extraneous-dependencies, no-unused-vars, @typescript-eslint/no-unused-vars -const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human +// in nodejs environments 
tfjs-node is required to be loaded before human
+const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
 
 // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases)
 const Human = require('../../dist/human.node.js'); // use this when using human in dev mode
@@ -17,7 +17,7 @@ const humanConfig = {
 
 async function main(inputFile) {
   // @ts-ignore
-  global.fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-extraneous-require, node/no-missing-import
+  global.fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-unpublished-import
   const human = new Human.Human(humanConfig); // create instance of human using default configuration
   log.info('Human:', human.version, 'TF:', tf.version_core);
   await human.load(); // optional as models would be loaded on-demand first time they are required
diff --git a/demo/nodejs/node-similarity.js b/demo/nodejs/node-similarity.js
index a9402860..84cdddcc 100644
--- a/demo/nodejs/node-similarity.js
+++ b/demo/nodejs/node-similarity.js
@@ -2,12 +2,12 @@
  * Human Person Similarity test for NodeJS
  */
 
-const log = require('@vladmandic/pilogger');
 const fs = require('fs');
 const process = require('process');
 
-// eslint-disable-next-line import/no-extraneous-dependencies, no-unused-vars, @typescript-eslint/no-unused-vars
-const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require
+// in nodejs environments tfjs-node is required to be loaded before human
+const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
 
 // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases)
 const Human = require('../../dist/human.node.js'); // use this when using human in dev mode
@@ -25,7 +25,7 @@ const myConfig = {
 async function init() {
   human = new Human.Human(myConfig);
   await human.tf.ready();
-  log.info('Human:', human.version);
+  log.info('Human:', human.version, 'TF:', tf.version_core);
   await human.load();
   const loaded = Object.keys(human.models).filter((a) => human.models[a]);
   log.info('Loaded:', loaded);
@@ -34,12 +34,11 @@
 async function detect(input) {
   if (!fs.existsSync(input)) {
-    log.error('Cannot load image:', input);
-    process.exit(1);
+    throw new Error(`Cannot load image: ${input}`);
   }
   const buffer = fs.readFileSync(input);
   const tensor = human.tf.node.decodeImage(buffer, 3);
-  log.state('Loaded image:', input, tensor['shape']);
+  log.state('Loaded image:', input, tensor.shape);
   const result = await human.detect(tensor, myConfig);
   human.tf.dispose(tensor);
   log.state('Detected faces:', result.face.length);
@@ -50,15 +49,13 @@
 async function main() {
   log.configure({ inspect: { breakLength: 265 } });
   log.header();
   if (process.argv.length !== 4) {
-    log.error('Parameters: missing');
-    process.exit(1);
+    throw new Error('Parameters: missing');
   }
   await init();
   const res1 = await detect(process.argv[2]);
   const res2 = await detect(process.argv[3]);
   if (!res1 || !res1.face || res1.face.length === 0 || !res2 || !res2.face || res2.face.length === 0) {
-    log.error('Could not detect face descriptors');
-    process.exit(1);
+    throw new Error('Could not detect face descriptors');
   }
   const similarity = human.similarity(res1.face[0].embedding, res2.face[0].embedding, { order: 2 });
log.data('Similarity: ', similarity); diff --git a/demo/nodejs/node-simple.js b/demo/nodejs/node-simple.js index 57447726..e0ae5bc4 100644 --- a/demo/nodejs/node-simple.js +++ b/demo/nodejs/node-simple.js @@ -5,7 +5,8 @@ const fs = require('fs'); const process = require('process'); -const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human +// in nodejs environments tfjs-node is required to be loaded before human +const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases) const Human = require('../../dist/human.node.js'); // use this when using human in dev mode diff --git a/demo/nodejs/node-video.js b/demo/nodejs/node-video.js index e9fbf830..fefed1d3 100644 --- a/demo/nodejs/node-video.js +++ b/demo/nodejs/node-video.js @@ -12,11 +12,11 @@ */ const spawn = require('child_process').spawn; -const log = require('@vladmandic/pilogger'); -// @ts-ignore pipe2jpeg is not installed by default -const Pipe2Jpeg = require('pipe2jpeg'); // eslint-disable-line node/no-missing-require - -const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human +const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require +// in nodejs environments tfjs-node is required to be loaded before human +const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require +// const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases) +const Pipe2Jpeg = require('pipe2jpeg'); // eslint-disable-line node/no-missing-require, import/no-unresolved // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases) const Human = require('../../dist/human.node.js'); // use this when using human in dev mode diff --git a/demo/nodejs/node-webcam.js b/demo/nodejs/node-webcam.js index 22bed4ba..3c852525 100644 --- a/demo/nodejs/node-webcam.js +++ b/demo/nodejs/node-webcam.js @@ -7,10 +7,11 @@ */ let initial = true; // remember if this is the first run to print additional details -const log = require('@vladmandic/pilogger'); -const nodeWebCam = require('node-webcam'); // eslint-disable-line node/no-missing-require, node/no-extraneous-require +const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require +const nodeWebCam = require('node-webcam'); // eslint-disable-line import/no-unresolved, node/no-missing-require -const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human +// in nodejs environments tfjs-node is required to be loaded before human +const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases) const Human = require('../../dist/human.node.js'); // use this when using human in dev mode @@ -59,18 +60,20 @@ async function detect() { } else { const tensor = buffer2tensor(data); // create tensor from image buffer if (initial) log.data('input tensor:', tensor.shape); - // eslint-disable-next-line promise/no-promise-in-callback - human.detect(tensor).then((result) => { - if (result && result.face && result.face.length > 0) { - for (let i = 0; i < 
result.face.length; i++) { - const face = result.face[i]; - const emotion = face.emotion?.reduce((prev, curr) => (prev.score > curr.score ? prev : curr)); - log.data(`detected face: #${i} boxScore:${face.boxScore} faceScore:${face.faceScore} age:${face.age} genderScore:${face.genderScore} gender:${face.gender} emotionScore:${emotion?.score} emotion:${emotion?.emotion} iris:${face.iris}`); + human.detect(tensor) // eslint-disable-line promise/no-promise-in-callback + .then((result) => { + if (result && result.face && result.face.length > 0) { + for (let i = 0; i < result.face.length; i++) { + const face = result.face[i]; + const emotion = face.emotion?.reduce((prev, curr) => (prev.score > curr.score ? prev : curr)); + log.data(`detected face: #${i} boxScore:${face.boxScore} faceScore:${face.faceScore} age:${face.age} genderScore:${face.genderScore} gender:${face.gender} emotionScore:${emotion?.score} emotion:${emotion?.emotion} iris:${face.iris}`); + } + } else { + log.data(' Face: N/A'); } - } else { - log.data(' Face: N/A'); - } - }); + return result; + }) + .catch(() => log.error('human detect error')); } initial = false; }); diff --git a/demo/nodejs/node.js b/demo/nodejs/node.js index 4a699152..31d31dcd 100644 --- a/demo/nodejs/node.js +++ b/demo/nodejs/node.js @@ -4,14 +4,15 @@ * Requires [node-fetch](https://www.npmjs.com/package/node-fetch) to provide `fetch` functionality in NodeJS environment */ -const log = require('@vladmandic/pilogger'); +const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require const fs = require('fs'); const path = require('path'); const process = require('process'); let fetch; // fetch is dynamically imported later -const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human +// in nodejs environments tfjs-node is required to be loaded before human +const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases) const Human = require('../../dist/human.node.js'); // use this when using human in dev mode @@ -55,7 +56,7 @@ async function init() { const loaded = Object.keys(human.models).filter((a) => human.models[a]); log.info('Loaded:', loaded); // log.info('Memory state:', human.tf.engine().memory()); - log.data(tf.backend()['binding'] ? tf.backend()['binding']['TF_Version'] : null); + log.data(tf.backend().binding ? 
tf.backend().binding.TF_Version : null); } async function detect(input) { @@ -88,7 +89,7 @@ async function detect(input) { }); // image shape contains image dimensions and depth - log.state('Processing:', tensor['shape']); + log.state('Processing:', tensor.shape); // run actual detection let result; @@ -191,7 +192,7 @@ async function main() { log.configure({ inspect: { breakLength: 265 } }); log.header(); log.info('Current folder:', process.env.PWD); - fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-extraneous-require, node/no-missing-import + fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-unpublished-import await init(); const f = process.argv[2]; if (process.argv.length !== 3) { @@ -199,20 +200,18 @@ async function main() { await test(); } else if (!fs.existsSync(f) && !f.startsWith('http')) { log.error(`File not found: ${process.argv[2]}`); - } else { - if (fs.existsSync(f)) { - const stat = fs.statSync(f); - if (stat.isDirectory()) { - const dir = fs.readdirSync(f); - for (const file of dir) { - await detect(path.join(f, file)); - } - } else { - await detect(f); + } else if (fs.existsSync(f)) { + const stat = fs.statSync(f); + if (stat.isDirectory()) { + const dir = fs.readdirSync(f); + for (const file of dir) { + await detect(path.join(f, file)); } } else { await detect(f); } + } else { + await detect(f); } } diff --git a/demo/nodejs/process-folder.js b/demo/nodejs/process-folder.js index 05c26457..51bcf1f9 100644 --- a/demo/nodejs/process-folder.js +++ b/demo/nodejs/process-folder.js @@ -10,9 +10,10 @@ const fs = require('fs'); const path = require('path'); const process = require('process'); -const log = require('@vladmandic/pilogger'); -const canvas = require('canvas'); // eslint-disable-line node/no-extraneous-require, node/no-missing-require -const tf = require('@tensorflow/tfjs-node-gpu'); // for nodejs, `tfjs-node` or `tfjs-node-gpu` should be loaded before using Human +const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require +const canvas = require('canvas'); // eslint-disable-line node/no-unpublished-require +// for nodejs, `tfjs-node` or `tfjs-node-gpu` should be loaded before using Human +const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require const Human = require('../../dist/human.node-gpu.js'); // this is 'const Human = require('../dist/human.node-gpu.js').default;' const config = { // just enable all and leave default settings @@ -72,14 +73,12 @@ async function main() { const outputCtx = outputCanvas.getContext('2d'); const inputImage = await canvas.loadImage(buffer); // load image using canvas library outputCtx.drawImage(inputImage, 0, 0); // draw input image onto canvas - // @ts-ignore human.draw.all(outputCanvas, result); // use human build-in method to draw results as overlays on canvas const outFile = path.join(outDir, image); const outStream = fs.createWriteStream(outFile); // write canvas to new image file outStream.on('finish', () => log.state('Output image:', outFile, outputCanvas.width, outputCanvas.height)); outStream.on('error', (err) => log.error('Output error:', outFile, err)); const stream = outputCanvas.createJPEGStream({ quality: 0.5, progressive: true, chromaSubsampling: true }); - // @ts-ignore stream.pipe(outStream); } } diff --git a/demo/offline.html b/demo/offline.html index fa9e6e7a..5dd71090 100644 --- a/demo/offline.html +++ b/demo/offline.html @@ -24,13 +24,13 @@ a:hover { color: lightskyblue; 
text-decoration: none; } .row { width: 90vw; margin: auto; margin-top: 100px; text-align: center; }
[hunk body garbled in extraction: the HTML tags were stripped, leaving only bare text and diff markers; both the removed (-) and the added (+) blocks carry the same page body, with the "Human: Offline" text and an "icon" image, so the visible change appears to be indentation only]
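A minimal sketch of the markup shape this hunk implies (the stray "+" markers below are what remains of its stripped closing lines); the tag names and src path are hypothetical, since the originals were lost:
  <div class="row">
    Human: Offline<br>
    <img src="../assets/icon.png" alt="icon"> <!-- hypothetical src path -->
  </div>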
+ diff --git a/demo/typescript/index.js b/demo/typescript/index.js index 2d02856b..657dfb81 100644 --- a/demo/typescript/index.js +++ b/demo/typescript/index.js @@ -4,6 +4,6 @@ author: ' */ -import{Human as p}from"../../dist/human.esm.js";var w={async:!1,modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},gesture:{enabled:!0}},e=new p(w);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var t={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},o={detectFPS:0,drawFPS:0,frames:0,averageMs:0},i=(...a)=>{t.log.innerText+=a.join(" ")+` -`,console.log(...a)},r=a=>t.fps.innerText=a,b=a=>t.perf.innerText="tensors:"+e.tf.memory().numTensors+" | performance: "+JSON.stringify(a).replace(/"|{|}/g,"").replace(/,/g," | ");async function h(){r("starting webcam...");let a={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth},height:{ideal:document.body.clientHeight}}},d=await navigator.mediaDevices.getUserMedia(a),m=new Promise(u=>{t.video.onloadeddata=()=>u(!0)});t.video.srcObject=d,t.video.play(),await m,t.canvas.width=t.video.videoWidth,t.canvas.height=t.video.videoHeight;let s=d.getVideoTracks()[0],f=s.getCapabilities?s.getCapabilities():"",v=s.getSettings?s.getSettings():"",g=s.getConstraints?s.getConstraints():"";i("video:",t.video.videoWidth,t.video.videoHeight,s.label,{stream:d,track:s,settings:v,constraints:g,capabilities:f}),t.canvas.onclick=()=>{t.video.paused?t.video.play():t.video.pause()}}async function c(){if(!t.video.paused){n.start===0&&(n.start=e.now()),await e.detect(t.video);let a=e.tf.memory().numTensors;a-n.tensors!==0&&i("allocated tensors:",a-n.tensors),n.tensors=a,o.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,o.frames++,o.averageMs=Math.round(1e3*(e.now()-n.start)/o.frames)/1e3,o.frames%100===0&&!t.video.paused&&i("performance",{...o,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(c)}async function l(){if(!t.video.paused){let d=await e.next(e.result);e.config.filter.flip?await e.draw.canvas(d.canvas,t.canvas):await e.draw.canvas(t.video,t.canvas),await e.draw.all(t.canvas,d),b(d.performance)}let a=e.now();o.drawFPS=Math.round(1e3*1e3/(a-n.draw))/1e3,n.draw=a,r(t.video.paused?"paused":`fps: ${o.detectFPS.toFixed(1).padStart(5," ")} detect | ${o.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(l,30)}async function M(){i("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),i("platform:",e.env.platform,"| agent:",e.env.agent),r("loading..."),await e.load(),i("backend:",e.tf.getBackend(),"| available:",e.env.backends),i("models stats:",e.getModelStats()),i("models loaded:",Object.values(e.models).filter(a=>a!==null).length),r("initializing..."),await e.warmup(),await h(),await c(),await l()}window.onload=M; +import*as c from"../../dist/human.esm.js";var w={async:!1,modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},gesture:{enabled:!0}},e=new 
c.Human(w);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var t={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},s={detectFPS:0,drawFPS:0,frames:0,averageMs:0},i=(...a)=>{t.log.innerText+=a.join(" ")+` +`,console.log(...a)},r=a=>t.fps.innerText=a,b=a=>t.perf.innerText="tensors:"+e.tf.memory().numTensors+" | performance: "+JSON.stringify(a).replace(/"|{|}/g,"").replace(/,/g," | ");async function h(){r("starting webcam...");let a={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth},height:{ideal:document.body.clientHeight}}},d=await navigator.mediaDevices.getUserMedia(a),f=new Promise(p=>{t.video.onloadeddata=()=>p(!0)});t.video.srcObject=d,t.video.play(),await f,t.canvas.width=t.video.videoWidth,t.canvas.height=t.video.videoHeight;let o=d.getVideoTracks()[0],v=o.getCapabilities?o.getCapabilities():"",g=o.getSettings?o.getSettings():"",u=o.getConstraints?o.getConstraints():"";i("video:",t.video.videoWidth,t.video.videoHeight,o.label,{stream:d,track:o,settings:g,constraints:u,capabilities:v}),t.canvas.onclick=()=>{t.video.paused?t.video.play():t.video.pause()}}async function l(){if(!t.video.paused){n.start===0&&(n.start=e.now()),await e.detect(t.video);let a=e.tf.memory().numTensors;a-n.tensors!==0&&i("allocated tensors:",a-n.tensors),n.tensors=a,s.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,s.frames++,s.averageMs=Math.round(1e3*(e.now()-n.start)/s.frames)/1e3,s.frames%100===0&&!t.video.paused&&i("performance",{...s,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(l)}async function m(){if(!t.video.paused){let d=await e.next(e.result);e.config.filter.flip?await e.draw.canvas(d.canvas,t.canvas):await e.draw.canvas(t.video,t.canvas),await e.draw.all(t.canvas,d),b(d.performance)}let a=e.now();s.drawFPS=Math.round(1e3*1e3/(a-n.draw))/1e3,n.draw=a,r(t.video.paused?"paused":`fps: ${s.detectFPS.toFixed(1).padStart(5," ")} detect | ${s.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(m,30)}async function M(){i("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),i("platform:",e.env.platform,"| agent:",e.env.agent),r("loading..."),await e.load(),i("backend:",e.tf.getBackend(),"| available:",e.env.backends),i("models stats:",e.getModelStats()),i("models loaded:",Object.values(e.models).filter(a=>a!==null).length),r("initializing..."),await e.warmup(),await h(),await l(),await m()}window.onload=M; //# sourceMappingURL=index.js.map diff --git a/demo/typescript/index.js.map b/demo/typescript/index.js.map index bf3e4f62..d60e5866 100644 --- a/demo/typescript/index.js.map +++ b/demo/typescript/index.js.map @@ -1,7 +1,7 @@ { "version": 3, "sources": ["index.ts"], - "sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary \n * @author \n * @copyright \n * @license MIT\n */\n\nimport { Human, Config } from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human\n\nconst humanConfig: Partial = { // user configuration for human, used to fine-tune behavior\n // backend: 'wasm' as const,\n // wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.18.0/dist/',\n // cacheSensitivity: 0,\n async: false,\n modelBasePath: '../../models',\n filter: { enabled: true, equalization: false, flip: false },\n face: { enabled: true, detector: { rotation: false }, 
mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },\n body: { enabled: true },\n hand: { enabled: true },\n object: { enabled: false },\n gesture: { enabled: true },\n};\n\nconst human = new Human(humanConfig); // create instance of human with overrides from user configuration\n\nhuman.env['perfadd'] = false; // is performance data showing instant or total values\nhuman.draw.options.font = 'small-caps 18px \"Lato\"'; // set font used to draw labels when using draw methods\nhuman.draw.options.lineHeight = 20;\n// human.draw.options.fillPolygons = true;\n\nconst dom = { // grab instances of dom objects so we dont have to look them up later\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('status') as HTMLPreElement,\n perf: document.getElementById('performance') as HTMLDivElement,\n};\nconst timestamp = { detect: 0, draw: 0, tensors: 0, start: 0 }; // holds information used to calculate performance and possible memory leaks\nconst fps = { detectFPS: 0, drawFPS: 0, frames: 0, averageMs: 0 }; // holds calculated fps information for both detect and screen refresh\n\nconst log = (...msg) => { // helper method to output messages\n dom.log.innerText += msg.join(' ') + '\\n';\n // eslint-disable-next-line no-console\n console.log(...msg);\n};\nconst status = (msg) => dom.fps.innerText = msg; // print status element\nconst perf = (msg) => dom.perf.innerText = 'tensors:' + human.tf.memory().numTensors + ' | performance: ' + JSON.stringify(msg).replace(/\"|{|}/g, '').replace(/,/g, ' | '); // print performance element\n\nasync function webCam() { // initialize webcam\n status('starting webcam...');\n // @ts-ignore resizeMode is not yet defined in tslib\n const options: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth }, height: { ideal: document.body.clientHeight } } };\n const stream: MediaStream = await navigator.mediaDevices.getUserMedia(options);\n const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });\n dom.video.srcObject = stream;\n dom.video.play();\n await ready;\n dom.canvas.width = dom.video.videoWidth;\n dom.canvas.height = dom.video.videoHeight;\n const track: MediaStreamTrack = stream.getVideoTracks()[0];\n const capabilities: MediaTrackCapabilities | string = track.getCapabilities ? track.getCapabilities() : '';\n const settings: MediaTrackSettings | string = track.getSettings ? track.getSettings() : '';\n const constraints: MediaTrackConstraints | string = track.getConstraints ? 
track.getConstraints() : '';\n log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });\n dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click\n if (dom.video.paused) dom.video.play();\n else dom.video.pause();\n };\n}\n\nasync function detectionLoop() { // main detection loop\n if (!dom.video.paused) {\n if (timestamp.start === 0) timestamp.start = human.now();\n // log('profiling data:', await human.profile(dom.video));\n await human.detect(dom.video); // actual detection; were not capturing output in a local variable as it can also be reached via human.result\n const tensors = human.tf.memory().numTensors; // check current tensor usage for memory leaks\n if (tensors - timestamp.tensors !== 0) log('allocated tensors:', tensors - timestamp.tensors); // printed on start and each time there is a tensor leak\n timestamp.tensors = tensors;\n fps.detectFPS = Math.round(1000 * 1000 / (human.now() - timestamp.detect)) / 1000;\n fps.frames++;\n fps.averageMs = Math.round(1000 * (human.now() - timestamp.start) / fps.frames) / 1000;\n if (fps.frames % 100 === 0 && !dom.video.paused) log('performance', { ...fps, tensors: timestamp.tensors });\n }\n timestamp.detect = human.now();\n requestAnimationFrame(detectionLoop); // start new frame immediately\n}\n\nasync function drawLoop() { // main screen refresh loop\n if (!dom.video.paused) {\n const interpolated = await human.next(human.result); // smoothen result using last-known results\n if (human.config.filter.flip) await human.draw.canvas(interpolated.canvas as HTMLCanvasElement, dom.canvas); // draw processed image to screen canvas\n else await human.draw.canvas(dom.video, dom.canvas); // draw original video to screen canvas // better than using procesed image as this loop happens faster than processing loop\n await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.\n perf(interpolated.performance); // write performance data\n }\n const now = human.now();\n fps.drawFPS = Math.round(1000 * 1000 / (now - timestamp.draw)) / 1000;\n timestamp.draw = now;\n status(dom.video.paused ? 
'paused' : `fps: ${fps.detectFPS.toFixed(1).padStart(5, ' ')} detect | ${fps.drawFPS.toFixed(1).padStart(5, ' ')} draw`); // write status\n setTimeout(drawLoop, 30); // use to slow down refresh from max refresh rate to target of 30 fps\n}\n\nasync function main() { // main entry point\n log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);\n log('platform:', human.env.platform, '| agent:', human.env.agent);\n status('loading...');\n await human.load(); // preload all models\n log('backend:', human.tf.getBackend(), '| available:', human.env.backends);\n log('models stats:', human.getModelStats());\n log('models loaded:', Object.values(human.models).filter((model) => model !== null).length);\n status('initializing...');\n await human.warmup(); // warmup function to initialize backend for future faster detection\n await webCam(); // start webcam\n await detectionLoop(); // start detection loop\n await drawLoop(); // start draw loop\n}\n\nwindow.onload = main;\n"], - "mappings": ";;;;;;AASA,OAAS,SAAAA,MAAqB,0BAE9B,IAAMC,EAA+B,CAInC,MAAO,GACP,cAAe,eACf,OAAQ,CAAE,QAAS,GAAM,aAAc,GAAO,KAAM,EAAM,EAC1D,KAAM,CAAE,QAAS,GAAM,SAAU,CAAE,SAAU,EAAM,EAAG,KAAM,CAAE,QAAS,EAAK,EAAG,UAAW,CAAE,QAAS,EAAM,EAAG,KAAM,CAAE,QAAS,EAAK,EAAG,YAAa,CAAE,QAAS,EAAK,EAAG,QAAS,CAAE,QAAS,EAAK,CAAE,EAClM,KAAM,CAAE,QAAS,EAAK,EACtB,KAAM,CAAE,QAAS,EAAK,EACtB,OAAQ,CAAE,QAAS,EAAM,EACzB,QAAS,CAAE,QAAS,EAAK,CAC3B,EAEMC,EAAQ,IAAIF,EAAMC,CAAW,EAEnCC,EAAM,IAAI,QAAa,GACvBA,EAAM,KAAK,QAAQ,KAAO,yBAC1BA,EAAM,KAAK,QAAQ,WAAa,GAGhC,IAAMC,EAAM,CACV,MAAO,SAAS,eAAe,OAAO,EACtC,OAAQ,SAAS,eAAe,QAAQ,EACxC,IAAK,SAAS,eAAe,KAAK,EAClC,IAAK,SAAS,eAAe,QAAQ,EACrC,KAAM,SAAS,eAAe,aAAa,CAC7C,EACMC,EAAY,CAAE,OAAQ,EAAG,KAAM,EAAG,QAAS,EAAG,MAAO,CAAE,EACvDC,EAAM,CAAE,UAAW,EAAG,QAAS,EAAG,OAAQ,EAAG,UAAW,CAAE,EAE1DC,EAAM,IAAIC,IAAQ,CACtBJ,EAAI,IAAI,WAAaI,EAAI,KAAK,GAAG,EAAI;AAAA,EAErC,QAAQ,IAAI,GAAGA,CAAG,CACpB,EACMC,EAAUD,GAAQJ,EAAI,IAAI,UAAYI,EACtCE,EAAQF,GAAQJ,EAAI,KAAK,UAAY,WAAaD,EAAM,GAAG,OAAO,EAAE,WAAa,mBAAqB,KAAK,UAAUK,CAAG,EAAE,QAAQ,SAAU,EAAE,EAAE,QAAQ,KAAM,KAAK,EAEzK,eAAeG,GAAS,CACtBF,EAAO,oBAAoB,EAE3B,IAAMG,EAAkC,CAAE,MAAO,GAAO,MAAO,CAAE,WAAY,OAAQ,WAAY,OAAQ,MAAO,CAAE,MAAO,SAAS,KAAK,WAAY,EAAG,OAAQ,CAAE,MAAO,SAAS,KAAK,YAAa,CAAE,CAAE,EAChMC,EAAsB,MAAM,UAAU,aAAa,aAAaD,CAAO,EACvEE,EAAQ,IAAI,QAASC,GAAY,CAAEX,EAAI,MAAM,aAAe,IAAMW,EAAQ,EAAI,CAAG,CAAC,EACxFX,EAAI,MAAM,UAAYS,EACtBT,EAAI,MAAM,KAAK,EACf,MAAMU,EACNV,EAAI,OAAO,MAAQA,EAAI,MAAM,WAC7BA,EAAI,OAAO,OAASA,EAAI,MAAM,YAC9B,IAAMY,EAA0BH,EAAO,eAAe,EAAE,GAClDI,EAAgDD,EAAM,gBAAkBA,EAAM,gBAAgB,EAAI,GAClGE,EAAwCF,EAAM,YAAcA,EAAM,YAAY,EAAI,GAClFG,EAA8CH,EAAM,eAAiBA,EAAM,eAAe,EAAI,GACpGT,EAAI,SAAUH,EAAI,MAAM,WAAYA,EAAI,MAAM,YAAaY,EAAM,MAAO,CAAE,OAAAH,EAAQ,MAAAG,EAAO,SAAAE,EAAU,YAAAC,EAAa,aAAAF,CAAa,CAAC,EAC9Hb,EAAI,OAAO,QAAU,IAAM,CACrBA,EAAI,MAAM,OAAQA,EAAI,MAAM,KAAK,EAChCA,EAAI,MAAM,MAAM,CACvB,CACF,CAEA,eAAegB,GAAgB,CAC7B,GAAI,CAAChB,EAAI,MAAM,OAAQ,CACjBC,EAAU,QAAU,IAAGA,EAAU,MAAQF,EAAM,IAAI,GAEvD,MAAMA,EAAM,OAAOC,EAAI,KAAK,EAC5B,IAAMiB,EAAUlB,EAAM,GAAG,OAAO,EAAE,WAC9BkB,EAAUhB,EAAU,UAAY,GAAGE,EAAI,qBAAsBc,EAAUhB,EAAU,OAAO,EAC5FA,EAAU,QAAUgB,EACpBf,EAAI,UAAY,KAAK,MAAM,IAAO,KAAQH,EAAM,IAAI,EAAIE,EAAU,OAAO,EAAI,IAC7EC,EAAI,SACJA,EAAI,UAAY,KAAK,MAAM,KAAQH,EAAM,IAAI,EAAIE,EAAU,OAASC,EAAI,MAAM,EAAI,IAC9EA,EAAI,OAAS,MAAQ,GAAK,CAACF,EAAI,MAAM,QAAQG,EAAI,cAAe,CAAE,GAAGD,EAAK,QAASD,EAAU,OAAQ,CAAC,CAC5G,CACAA,EAAU,OAASF,EAAM,IAAI,EAC7B,sBAAsBiB,CAAa,CACrC,CAEA,eAAeE,GAAW,CACxB,GAAI,CAAClB,EAAI,MAAM,OAAQ,CACrB,IAAMmB,EAAe,MAAMpB,EAAM,KAAKA,EAAM,MAAM,EAC9CA,EAAM,OAAO,OAAO,KAAM,MAAMA,EAAM,KAAK,OAAOoB,E
AAa,OAA6BnB,EAAI,MAAM,EACrG,MAAMD,EAAM,KAAK,OAAOC,EAAI,MAAOA,EAAI,MAAM,EAClD,MAAMD,EAAM,KAAK,IAAIC,EAAI,OAAQmB,CAAY,EAC7Cb,EAAKa,EAAa,WAAW,CAC/B,CACA,IAAMC,EAAMrB,EAAM,IAAI,EACtBG,EAAI,QAAU,KAAK,MAAM,IAAO,KAAQkB,EAAMnB,EAAU,KAAK,EAAI,IACjEA,EAAU,KAAOmB,EACjBf,EAAOL,EAAI,MAAM,OAAS,SAAW,QAAQE,EAAI,UAAU,QAAQ,CAAC,EAAE,SAAS,EAAG,GAAG,cAAcA,EAAI,QAAQ,QAAQ,CAAC,EAAE,SAAS,EAAG,GAAG,QAAQ,EACjJ,WAAWgB,EAAU,EAAE,CACzB,CAEA,eAAeG,GAAO,CACpBlB,EAAI,iBAAkBJ,EAAM,QAAS,kBAAmBA,EAAM,GAAG,QAAQ,YAAY,EACrFI,EAAI,YAAaJ,EAAM,IAAI,SAAU,WAAYA,EAAM,IAAI,KAAK,EAChEM,EAAO,YAAY,EACnB,MAAMN,EAAM,KAAK,EACjBI,EAAI,WAAYJ,EAAM,GAAG,WAAW,EAAG,eAAgBA,EAAM,IAAI,QAAQ,EACzEI,EAAI,gBAAiBJ,EAAM,cAAc,CAAC,EAC1CI,EAAI,iBAAkB,OAAO,OAAOJ,EAAM,MAAM,EAAE,OAAQuB,GAAUA,IAAU,IAAI,EAAE,MAAM,EAC1FjB,EAAO,iBAAiB,EACxB,MAAMN,EAAM,OAAO,EACnB,MAAMQ,EAAO,EACb,MAAMS,EAAc,EACpB,MAAME,EAAS,CACjB,CAEA,OAAO,OAASG", - "names": ["Human", "humanConfig", "human", "dom", "timestamp", "fps", "log", "msg", "status", "perf", "webCam", "options", "stream", "ready", "resolve", "track", "capabilities", "settings", "constraints", "detectionLoop", "tensors", "drawLoop", "interpolated", "now", "main", "model"] + "sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary \n * @author \n * @copyright \n * @license MIT\n */\n\nimport * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human\n\nconst humanConfig: Partial = { // user configuration for human, used to fine-tune behavior\n // backend: 'wasm' as const,\n // wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.18.0/dist/',\n // cacheSensitivity: 0,\n async: false,\n modelBasePath: '../../models',\n filter: { enabled: true, equalization: false, flip: false },\n face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },\n body: { enabled: true },\n hand: { enabled: true },\n object: { enabled: false },\n gesture: { enabled: true },\n};\n\nconst human = new H.Human(humanConfig); // create instance of human with overrides from user configuration\n\nhuman.env.perfadd = false; // is performance data showing instant or total values\nhuman.draw.options.font = 'small-caps 18px \"Lato\"'; // set font used to draw labels when using draw methods\nhuman.draw.options.lineHeight = 20;\n// human.draw.options.fillPolygons = true;\n\nconst dom = { // grab instances of dom objects so we dont have to look them up later\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('status') as HTMLPreElement,\n perf: document.getElementById('performance') as HTMLDivElement,\n};\nconst timestamp = { detect: 0, draw: 0, tensors: 0, start: 0 }; // holds information used to calculate performance and possible memory leaks\nconst fps = { detectFPS: 0, drawFPS: 0, frames: 0, averageMs: 0 }; // holds calculated fps information for both detect and screen refresh\n\nconst log = (...msg) => { // helper method to output messages\n dom.log.innerText += msg.join(' ') + '\\n';\n console.log(...msg); // eslint-disable-line no-console\n};\nconst status = (msg) => dom.fps.innerText = msg; // print status element\nconst perf = (msg) => dom.perf.innerText = 'tensors:' + human.tf.memory().numTensors + ' | performance: ' + JSON.stringify(msg).replace(/\"|{|}/g, '').replace(/,/g, ' | '); // 
print performance element\n\nasync function webCam() { // initialize webcam\n status('starting webcam...');\n // @ts-ignore resizeMode is not yet defined in tslib\n const options: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth }, height: { ideal: document.body.clientHeight } } };\n const stream: MediaStream = await navigator.mediaDevices.getUserMedia(options);\n const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });\n dom.video.srcObject = stream;\n dom.video.play();\n await ready;\n dom.canvas.width = dom.video.videoWidth;\n dom.canvas.height = dom.video.videoHeight;\n const track: MediaStreamTrack = stream.getVideoTracks()[0];\n const capabilities: MediaTrackCapabilities | string = track.getCapabilities ? track.getCapabilities() : '';\n const settings: MediaTrackSettings | string = track.getSettings ? track.getSettings() : '';\n const constraints: MediaTrackConstraints | string = track.getConstraints ? track.getConstraints() : '';\n log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });\n dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click\n if (dom.video.paused) dom.video.play();\n else dom.video.pause();\n };\n}\n\nasync function detectionLoop() { // main detection loop\n if (!dom.video.paused) {\n if (timestamp.start === 0) timestamp.start = human.now();\n // log('profiling data:', await human.profile(dom.video));\n await human.detect(dom.video); // actual detection; were not capturing output in a local variable as it can also be reached via human.result\n const tensors = human.tf.memory().numTensors; // check current tensor usage for memory leaks\n if (tensors - timestamp.tensors !== 0) log('allocated tensors:', tensors - timestamp.tensors); // printed on start and each time there is a tensor leak\n timestamp.tensors = tensors;\n fps.detectFPS = Math.round(1000 * 1000 / (human.now() - timestamp.detect)) / 1000;\n fps.frames++;\n fps.averageMs = Math.round(1000 * (human.now() - timestamp.start) / fps.frames) / 1000;\n if (fps.frames % 100 === 0 && !dom.video.paused) log('performance', { ...fps, tensors: timestamp.tensors });\n }\n timestamp.detect = human.now();\n requestAnimationFrame(detectionLoop); // start new frame immediately\n}\n\nasync function drawLoop() { // main screen refresh loop\n if (!dom.video.paused) {\n const interpolated = await human.next(human.result); // smoothen result using last-known results\n if (human.config.filter.flip) await human.draw.canvas(interpolated.canvas as HTMLCanvasElement, dom.canvas); // draw processed image to screen canvas\n else await human.draw.canvas(dom.video, dom.canvas); // draw original video to screen canvas // better than using procesed image as this loop happens faster than processing loop\n await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.\n perf(interpolated.performance); // write performance data\n }\n const now = human.now();\n fps.drawFPS = Math.round(1000 * 1000 / (now - timestamp.draw)) / 1000;\n timestamp.draw = now;\n status(dom.video.paused ? 
'paused' : `fps: ${fps.detectFPS.toFixed(1).padStart(5, ' ')} detect | ${fps.drawFPS.toFixed(1).padStart(5, ' ')} draw`); // write status\n setTimeout(drawLoop, 30); // use to slow down refresh from max refresh rate to target of 30 fps\n}\n\nasync function main() { // main entry point\n log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);\n log('platform:', human.env.platform, '| agent:', human.env.agent);\n status('loading...');\n await human.load(); // preload all models\n log('backend:', human.tf.getBackend(), '| available:', human.env.backends);\n log('models stats:', human.getModelStats());\n log('models loaded:', Object.values(human.models).filter((model) => model !== null).length);\n status('initializing...');\n await human.warmup(); // warmup function to initialize backend for future faster detection\n await webCam(); // start webcam\n await detectionLoop(); // start detection loop\n await drawLoop(); // start draw loop\n}\n\nwindow.onload = main;\n"], + "mappings": ";;;;;;AASA,UAAYA,MAAO,0BAEnB,IAAMC,EAAiC,CAIrC,MAAO,GACP,cAAe,eACf,OAAQ,CAAE,QAAS,GAAM,aAAc,GAAO,KAAM,EAAM,EAC1D,KAAM,CAAE,QAAS,GAAM,SAAU,CAAE,SAAU,EAAM,EAAG,KAAM,CAAE,QAAS,EAAK,EAAG,UAAW,CAAE,QAAS,EAAM,EAAG,KAAM,CAAE,QAAS,EAAK,EAAG,YAAa,CAAE,QAAS,EAAK,EAAG,QAAS,CAAE,QAAS,EAAK,CAAE,EAClM,KAAM,CAAE,QAAS,EAAK,EACtB,KAAM,CAAE,QAAS,EAAK,EACtB,OAAQ,CAAE,QAAS,EAAM,EACzB,QAAS,CAAE,QAAS,EAAK,CAC3B,EAEMC,EAAQ,IAAM,QAAMD,CAAW,EAErCC,EAAM,IAAI,QAAU,GACpBA,EAAM,KAAK,QAAQ,KAAO,yBAC1BA,EAAM,KAAK,QAAQ,WAAa,GAGhC,IAAMC,EAAM,CACV,MAAO,SAAS,eAAe,OAAO,EACtC,OAAQ,SAAS,eAAe,QAAQ,EACxC,IAAK,SAAS,eAAe,KAAK,EAClC,IAAK,SAAS,eAAe,QAAQ,EACrC,KAAM,SAAS,eAAe,aAAa,CAC7C,EACMC,EAAY,CAAE,OAAQ,EAAG,KAAM,EAAG,QAAS,EAAG,MAAO,CAAE,EACvDC,EAAM,CAAE,UAAW,EAAG,QAAS,EAAG,OAAQ,EAAG,UAAW,CAAE,EAE1DC,EAAM,IAAIC,IAAQ,CACtBJ,EAAI,IAAI,WAAaI,EAAI,KAAK,GAAG,EAAI;AAAA,EACrC,QAAQ,IAAI,GAAGA,CAAG,CACpB,EACMC,EAAUD,GAAQJ,EAAI,IAAI,UAAYI,EACtCE,EAAQF,GAAQJ,EAAI,KAAK,UAAY,WAAaD,EAAM,GAAG,OAAO,EAAE,WAAa,mBAAqB,KAAK,UAAUK,CAAG,EAAE,QAAQ,SAAU,EAAE,EAAE,QAAQ,KAAM,KAAK,EAEzK,eAAeG,GAAS,CACtBF,EAAO,oBAAoB,EAE3B,IAAMG,EAAkC,CAAE,MAAO,GAAO,MAAO,CAAE,WAAY,OAAQ,WAAY,OAAQ,MAAO,CAAE,MAAO,SAAS,KAAK,WAAY,EAAG,OAAQ,CAAE,MAAO,SAAS,KAAK,YAAa,CAAE,CAAE,EAChMC,EAAsB,MAAM,UAAU,aAAa,aAAaD,CAAO,EACvEE,EAAQ,IAAI,QAASC,GAAY,CAAEX,EAAI,MAAM,aAAe,IAAMW,EAAQ,EAAI,CAAG,CAAC,EACxFX,EAAI,MAAM,UAAYS,EACtBT,EAAI,MAAM,KAAK,EACf,MAAMU,EACNV,EAAI,OAAO,MAAQA,EAAI,MAAM,WAC7BA,EAAI,OAAO,OAASA,EAAI,MAAM,YAC9B,IAAMY,EAA0BH,EAAO,eAAe,EAAE,GAClDI,EAAgDD,EAAM,gBAAkBA,EAAM,gBAAgB,EAAI,GAClGE,EAAwCF,EAAM,YAAcA,EAAM,YAAY,EAAI,GAClFG,EAA8CH,EAAM,eAAiBA,EAAM,eAAe,EAAI,GACpGT,EAAI,SAAUH,EAAI,MAAM,WAAYA,EAAI,MAAM,YAAaY,EAAM,MAAO,CAAE,OAAAH,EAAQ,MAAAG,EAAO,SAAAE,EAAU,YAAAC,EAAa,aAAAF,CAAa,CAAC,EAC9Hb,EAAI,OAAO,QAAU,IAAM,CACrBA,EAAI,MAAM,OAAQA,EAAI,MAAM,KAAK,EAChCA,EAAI,MAAM,MAAM,CACvB,CACF,CAEA,eAAegB,GAAgB,CAC7B,GAAI,CAAChB,EAAI,MAAM,OAAQ,CACjBC,EAAU,QAAU,IAAGA,EAAU,MAAQF,EAAM,IAAI,GAEvD,MAAMA,EAAM,OAAOC,EAAI,KAAK,EAC5B,IAAMiB,EAAUlB,EAAM,GAAG,OAAO,EAAE,WAC9BkB,EAAUhB,EAAU,UAAY,GAAGE,EAAI,qBAAsBc,EAAUhB,EAAU,OAAO,EAC5FA,EAAU,QAAUgB,EACpBf,EAAI,UAAY,KAAK,MAAM,IAAO,KAAQH,EAAM,IAAI,EAAIE,EAAU,OAAO,EAAI,IAC7EC,EAAI,SACJA,EAAI,UAAY,KAAK,MAAM,KAAQH,EAAM,IAAI,EAAIE,EAAU,OAASC,EAAI,MAAM,EAAI,IAC9EA,EAAI,OAAS,MAAQ,GAAK,CAACF,EAAI,MAAM,QAAQG,EAAI,cAAe,CAAE,GAAGD,EAAK,QAASD,EAAU,OAAQ,CAAC,CAC5G,CACAA,EAAU,OAASF,EAAM,IAAI,EAC7B,sBAAsBiB,CAAa,CACrC,CAEA,eAAeE,GAAW,CACxB,GAAI,CAAClB,EAAI,MAAM,OAAQ,CACrB,IAAMmB,EAAe,MAAMpB,EAAM,KAAKA,EAAM,MAAM,EAC9CA,EAAM,OAAO,OAAO,KAAM,MAAMA,EAAM,KAAK,OAAOoB,EAAa,OAA
6BnB,EAAI,MAAM,EACrG,MAAMD,EAAM,KAAK,OAAOC,EAAI,MAAOA,EAAI,MAAM,EAClD,MAAMD,EAAM,KAAK,IAAIC,EAAI,OAAQmB,CAAY,EAC7Cb,EAAKa,EAAa,WAAW,CAC/B,CACA,IAAMC,EAAMrB,EAAM,IAAI,EACtBG,EAAI,QAAU,KAAK,MAAM,IAAO,KAAQkB,EAAMnB,EAAU,KAAK,EAAI,IACjEA,EAAU,KAAOmB,EACjBf,EAAOL,EAAI,MAAM,OAAS,SAAW,QAAQE,EAAI,UAAU,QAAQ,CAAC,EAAE,SAAS,EAAG,GAAG,cAAcA,EAAI,QAAQ,QAAQ,CAAC,EAAE,SAAS,EAAG,GAAG,QAAQ,EACjJ,WAAWgB,EAAU,EAAE,CACzB,CAEA,eAAeG,GAAO,CACpBlB,EAAI,iBAAkBJ,EAAM,QAAS,kBAAmBA,EAAM,GAAG,QAAQ,YAAY,EACrFI,EAAI,YAAaJ,EAAM,IAAI,SAAU,WAAYA,EAAM,IAAI,KAAK,EAChEM,EAAO,YAAY,EACnB,MAAMN,EAAM,KAAK,EACjBI,EAAI,WAAYJ,EAAM,GAAG,WAAW,EAAG,eAAgBA,EAAM,IAAI,QAAQ,EACzEI,EAAI,gBAAiBJ,EAAM,cAAc,CAAC,EAC1CI,EAAI,iBAAkB,OAAO,OAAOJ,EAAM,MAAM,EAAE,OAAQuB,GAAUA,IAAU,IAAI,EAAE,MAAM,EAC1FjB,EAAO,iBAAiB,EACxB,MAAMN,EAAM,OAAO,EACnB,MAAMQ,EAAO,EACb,MAAMS,EAAc,EACpB,MAAME,EAAS,CACjB,CAEA,OAAO,OAASG", "names": ["H", "humanConfig", "human", "dom", "timestamp", "fps", "log", "msg", "status", "perf", "webCam", "options", "stream", "ready", "resolve", "track", "capabilities", "settings", "constraints", "detectionLoop", "tensors", "drawLoop", "interpolated", "now", "main", "model"] } diff --git a/demo/typescript/index.ts b/demo/typescript/index.ts index d487be9e..80e3a2d8 100644 --- a/demo/typescript/index.ts +++ b/demo/typescript/index.ts @@ -7,9 +7,9 @@ * @license MIT */ -import { Human, Config } from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human +import * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human -const humanConfig: Partial<Config> = { // user configuration for human, used to fine-tune behavior +const humanConfig: Partial<H.Config> = { // user configuration for human, used to fine-tune behavior // backend: 'wasm' as const, // wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.18.0/dist/', // cacheSensitivity: 0, @@ -23,9 +23,9 @@ const humanConfig: Partial<H.Config> = { // user configuration for human, used to gesture: { enabled: true }, }; -const human = new Human(humanConfig); // create instance of human with overrides from user configuration +const human = new H.Human(humanConfig); // create instance of human with overrides from user configuration -human.env['perfadd'] = false; // is performance data showing instant or total values +human.env.perfadd = false; // is performance data showing instant or total values human.draw.options.font = 'small-caps 18px "Lato"'; // set font used to draw labels when using draw methods human.draw.options.lineHeight = 20; // human.draw.options.fillPolygons = true; @@ -42,8 +42,7 @@ const fps = { detectFPS: 0, drawFPS: 0, frames: 0, averageMs: 0 }; // holds calc const log = (...msg) => { // helper method to output messages dom.log.innerText += msg.join(' ') + '\n'; - // eslint-disable-next-line no-console - console.log(...msg); + console.log(...msg); // eslint-disable-line no-console }; const status = (msg) => dom.fps.innerText = msg; // print status element const perf = (msg) => dom.perf.innerText = 'tensors:' + human.tf.memory().numTensors + ' | performance: ' + JSON.stringify(msg).replace(/"|{|}/g, '').replace(/,/g, ' | '); // print performance element diff --git a/package.json b/package.json index 5937c191..a4c9ec67 100644 --- a/package.json +++ b/package.json @@ -33,7 +33,7 @@ "clean": "build --profile clean", "build": "rimraf test/build.log && node build.js", "test": "node --no-warnings --unhandled-rejections=strict --trace-uncaught test/node.js", - "lint": "eslint src demo test", + "lint": "eslint *.json *.js src demo test models", "scan": "npx auditjs@latest
ossi --dev --quiet" }, "keywords": [ @@ -61,6 +61,8 @@ "tensorflow" ], "devDependencies": { + "@html-eslint/eslint-plugin": "^0.13.2", + "@html-eslint/parser": "^0.13.2", "@microsoft/api-extractor": "^7.29.3", "@tensorflow/tfjs": "^3.19.0", "@tensorflow/tfjs-backend-cpu": "^3.19.0", diff --git a/src/body/blazepose.ts b/src/body/blazepose.ts index 2482a3d1..d9e4c095 100644 --- a/src/body/blazepose.ts +++ b/src/body/blazepose.ts @@ -32,8 +32,8 @@ const sigmoid = (x) => (1 - (1 / (1 + Math.exp(x)))); export async function loadDetect(config: Config): Promise { if (env.initial) models.detector = null; - if (!models.detector && config.body['detector'] && config.body['detector']['modelPath'] || '') { - models.detector = await loadModel(config.body['detector']['modelPath']); + if (!models.detector && config.body['detector'] && config.body['detector'].modelPath || '') { + models.detector = await loadModel(config.body['detector'].modelPath); const inputs = Object.values(models.detector.modelSignature['inputs']); inputSize.detector[0] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[1].size) : 0; inputSize.detector[1] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : 0; @@ -94,7 +94,7 @@ async function prepareImage(input: Tensor, size: number): Promise { return final; } -function rescaleKeypoints(keypoints: Array<BodyKeypoint>, outputSize: [number, number]): Array<BodyKeypoint> { +function rescaleKeypoints(keypoints: BodyKeypoint[], outputSize: [number, number]): BodyKeypoint[] { for (const kpt of keypoints) { // first rescale due to padding kpt.position = [ Math.trunc(kpt.position[0] * (outputSize[0] + padding[2][0] + padding[2][1]) / outputSize[0] - padding[2][0]), @@ -120,7 +120,7 @@ function rescaleKeypoints(keypoints: Array<BodyKeypoint>, outputSize: [number, n return keypoints; } -async function fixKeypoints(keypoints: Array<BodyKeypoint>) { +async function fixKeypoints(keypoints: BodyKeypoint[]) { // palm z-coord is incorrect around near-zero so we approximate it const leftPalm = keypoints.find((k) => k.part === 'leftPalm') as BodyKeypoint; const leftWrist = keypoints.find((k) => k.part === 'leftWrist') as BodyKeypoint; @@ -146,7 +146,7 @@ async function detectLandmarks(input: Tensor, config: Config, outputSize: [numbe const points = await t.ld.data(); const distances = await t.world.data(); Object.keys(t).forEach((tensor) => tf.dispose(t[tensor])); // dont need tensors after this - const keypointsRelative: Array<BodyKeypoint> = []; + const keypointsRelative: BodyKeypoint[] = []; const depth = 5; // each points has x,y,z,visibility,presence for (let i = 0; i < points.length / depth; i++) { const score = sigmoid(points[depth * i + 3]); @@ -159,12 +159,12 @@ } if (poseScore < (config.body.minConfidence || 0)) return null; fixKeypoints(keypointsRelative); - const keypoints: Array<BodyKeypoint> = rescaleKeypoints(keypointsRelative, outputSize); // keypoints were relative to input image which is padded + const keypoints: BodyKeypoint[] = rescaleKeypoints(keypointsRelative, outputSize); // keypoints were relative to input image which is padded const kpts = keypoints.map((k) => k.position); const boxes = box.calc(kpts, [outputSize[0], outputSize[1]]); // now find boxes based on rescaled keypoints const annotations: Record = {} as Record; for (const [name, indexes] of Object.entries(coords.connected)) { - const pt: Array<Point[]> = []; + const pt: Point[][] = []; for (let i = 0; i < indexes.length - 1; i++) { const pt0 = keypoints.find((kpt) => kpt.part === indexes[i]); const pt1
= keypoints.find((kpt) => kpt.part === indexes[i + 1]); diff --git a/src/body/blazeposecoords.ts b/src/body/blazeposecoords.ts index 988cd1b1..01f43276 100644 --- a/src/body/blazeposecoords.ts +++ b/src/body/blazeposecoords.ts @@ -1,6 +1,6 @@ /* eslint-disable no-multi-spaces */ -export const kpt: Array<string> = [ +export const kpt: string[] = [ 'nose', // 0 'leftEyeInside', // 1 'leftEye', // 2 diff --git a/src/body/blazeposedetector.ts b/src/body/blazeposedetector.ts index 50fac822..58031e39 100644 --- a/src/body/blazeposedetector.ts +++ b/src/body/blazeposedetector.ts @@ -11,7 +11,7 @@ const numLayers = 5; const strides = [8, 16, 32, 32, 32]; export async function createAnchors() { - const anchors: Array<{ x: number, y: number }> = []; + const anchors: { x: number, y: number }[] = []; let layerId = 0; while (layerId < numLayers) { let anchorCount = 0; @@ -59,10 +59,10 @@ export async function decode(boxesTensor: Tensor, logitsTensor: Tensor, config: t.boxes = decodeBoxes(boxesTensor, anchorTensor); t.scores = tf.sigmoid(logitsTensor); t.argmax = tf.argMax(t.scores); - const i = (await t.argmax.data())[0] as number; + const i = (await t.argmax.data())[0]; const scores = await t.scores.data(); - const detected: Array<{ box: Box, boxRaw: Box, score: number }> = []; - const minScore = (config.body['detector'] && config.body['detector']['minConfidence']) ? config.body['detector']['minConfidence'] : 0; + const detected: { box: Box, boxRaw: Box, score: number }[] = []; + const minScore = (config.body['detector'] && config.body['detector'].minConfidence) ? config.body['detector'].minConfidence : 0; if (scores[i] >= minScore) { const boxes = await t.boxes.array(); const boxRaw: Box = boxes[i]; diff --git a/src/body/efficientpose.ts b/src/body/efficientpose.ts index 478b2fe8..172b56e6 100644 --- a/src/body/efficientpose.ts +++ b/src/body/efficientpose.ts @@ -45,10 +45,9 @@ async function max2d(inputs, minScore): Promise<[number, number, number]> { const y: number = (await div.data())[0]; tf.dispose([reshaped, max, coordinates, mod, div]); return [x, y, newScore]; - } else { - tf.dispose([reshaped, max]); - return [0, 0, newScore]; } + tf.dispose([reshaped, max]); + return [0, 0, newScore]; } export async function predict(image: Tensor, config: Config): Promise<BodyResult[]> { @@ -84,7 +83,7 @@ export async function predict(image: Tensor, config: Config): Promise<BodyResult[]> { - if (partScore > (config.body?.minConfidence || 0)) { + if (partScore > (config.body.minConfidence || 0)) { cache.keypoints.push({ score: Math.round(100 * partScore) / 100, part: coords.kpt[id] as BodyLandmark, @@ -119,7 +118,7 @@ export async function predict(image: Tensor, config: Config): Promise<BodyResult[]> { - const pt: Array<Point[]> = []; + const pt: Point[][] = []; for (let i = 0; i < indexes.length - 1; i++) { const pt0 = cache.keypoints.find((kpt) => kpt.part === indexes[i]); const pt1 = cache.keypoints.find((kpt) => kpt.part === indexes[i + 1]); diff --git a/src/body/efficientposecoords.ts b/src/body/efficientposecoords.ts index 9f707b24..ba28080c 100644 --- a/src/body/efficientposecoords.ts +++ b/src/body/efficientposecoords.ts @@ -1,4 +1,4 @@ -export const kpt: Array<string> = [ +export const kpt: string[] = [ 'head', 'neck', 'rightShoulder', diff --git a/src/body/movenet.ts b/src/body/movenet.ts index fa97d6dd..99b51b2e 100644 --- a/src/body/movenet.ts +++ b/src/body/movenet.ts @@ -22,8 +22,8 @@ let skipped = Number.MAX_SAFE_INTEGER; // const boxExpandFact = 1.5; // increase to 150% const cache: { - boxes: Array<Box>, // unused - bodies: Array<BodyResult>; + boxes: Box[], // unused + bodies: BodyResult[]; last: number, } = {
boxes: [], @@ -44,7 +44,7 @@ export async function load(config: Config): Promise { async function parseSinglePose(res, config, image) { const kpt = res[0][0]; - const keypoints: Array<BodyKeypoint> = []; + const keypoints: BodyKeypoint[] = []; let score = 0; for (let id = 0; id < kpt.length; id++) { score = kpt[id][2]; @@ -62,11 +62,11 @@ async function parseSinglePose(res, config, image) { } } score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0); - const bodies: Array<BodyResult> = []; + const bodies: BodyResult[] = []; const newBox = box.calc(keypoints.map((pt) => pt.position), [image.shape[2], image.shape[1]]); const annotations: Record = {}; for (const [name, indexes] of Object.entries(coords.connected)) { - const pt: Array<Point[]> = []; + const pt: Point[][] = []; for (let i = 0; i < indexes.length - 1; i++) { const pt0 = keypoints.find((kp) => kp.part === indexes[i]); const pt1 = keypoints.find((kp) => kp.part === indexes[i + 1]); @@ -81,12 +81,12 @@ async function parseSinglePose(res, config, image) { } async function parseMultiPose(res, config, image) { - const bodies: Array<BodyResult> = []; + const bodies: BodyResult[] = []; for (let id = 0; id < res[0].length; id++) { const kpt = res[0][id]; const totalScore = Math.round(100 * kpt[51 + 4]) / 100; if (totalScore > config.body.minConfidence) { - const keypoints: Array<BodyKeypoint> = []; + const keypoints: BodyKeypoint[] = []; for (let i = 0; i < 17; i++) { const score = kpt[3 * i + 2]; if (score > config.body.minConfidence) { @@ -105,7 +105,7 @@ async function parseMultiPose(res, config, image) { // const box: Box = [Math.trunc(boxRaw[0] * (image.shape[2] || 0)), Math.trunc(boxRaw[1] * (image.shape[1] || 0)), Math.trunc(boxRaw[2] * (image.shape[2] || 0)), Math.trunc(boxRaw[3] * (image.shape[1] || 0))]; const annotations: Record = {} as Record; for (const [name, indexes] of Object.entries(coords.connected)) { - const pt: Array<Point[]> = []; + const pt: Point[][] = []; for (let i = 0; i < indexes.length - 1; i++) { const pt0 = keypoints.find((kp) => kp.part === indexes[i]); const pt1 = keypoints.find((kp) => kp.part === indexes[i + 1]); @@ -124,7 +124,7 @@ async function parseMultiPose(res, config, image) { } export async function predict(input: Tensor, config: Config): Promise<BodyResult[]> { - if (!model || !model?.inputs[0].shape) return []; // something is wrong with the model + if (!model || !model.inputs[0].shape) return []; // something is wrong with the model if (!config.skipAllowed) cache.boxes.length = 0; // allowed to use cache or not skipped++; // increment skip frames const skipTime = (config.body.skipTime || 0) > (now() - cache.last); diff --git a/src/body/movenetcoords.ts b/src/body/movenetcoords.ts index 0aa1075c..033a52b4 100644 --- a/src/body/movenetcoords.ts +++ b/src/body/movenetcoords.ts @@ -1,4 +1,4 @@ -export const kpt: Array<string> = [ // used to create part labels +export const kpt: string[] = [ // used to create part labels 'nose', 'leftEye', 'rightEye', @@ -18,7 +18,7 @@ export const kpt: Array<string> = [ // used to create part labels 'rightAnkle', ]; -export const horizontal: Array<string[]> = [ // used to fix left vs right +export const horizontal: string[][] = [ // used to fix left vs right ['leftEye', 'rightEye'], ['leftEar', 'rightEar'], ['leftShoulder', 'rightShoulder'], @@ -29,14 +29,14 @@ export const horizontal: Array<string[]> = [ // used to fix left vs right ['leftAnkle', 'rightAnkle'], ]; -export const vertical: Array<string[]> = [ // used to remove unlikely keypoint positions +export const vertical: string[][] = [ // used to remove unlikely keypoint positions ['leftKnee',
'leftShoulder'], ['rightKnee', 'rightShoulder'], ['leftAnkle', 'leftKnee'], ['rightAnkle', 'rightKnee'], ]; -export const relative: Array<string[][]> = [ // used to match relative body parts +export const relative: string[][][] = [ // used to match relative body parts [['leftHip', 'rightHip'], ['leftShoulder', 'rightShoulder']], [['leftElbow', 'rightElbow'], ['leftShoulder', 'rightShoulder']], ]; diff --git a/src/body/movenetfix.ts b/src/body/movenetfix.ts index b09768e7..d5257cbd 100644 --- a/src/body/movenetfix.ts +++ b/src/body/movenetfix.ts @@ -7,7 +7,7 @@ import type { Tensor } from '../tfjs/types'; const maxJitter = 0.005; // default allowed jitter is within 0.5% const cache: { - keypoints: Array<BodyKeypoint>, + keypoints: BodyKeypoint[], padding: [number, number][]; } = { keypoints: [], @@ -57,7 +57,7 @@ export function bodyParts(body: BodyResult) { // model sometimes mixes up left v } } -export function jitter(keypoints: Array<BodyKeypoint>): Array<BodyKeypoint> { +export function jitter(keypoints: BodyKeypoint[]): BodyKeypoint[] { for (let i = 0; i < keypoints.length; i++) { if (keypoints[i] && cache.keypoints[i]) { const diff = [Math.abs(keypoints[i].positionRaw[0] - cache.keypoints[i].positionRaw[0]), Math.abs(keypoints[i].positionRaw[1] - cache.keypoints[i].positionRaw[1])]; diff --git a/src/body/posenet.ts b/src/body/posenet.ts index 7154c0eb..99710711 100644 --- a/src/body/posenet.ts +++ b/src/body/posenet.ts @@ -134,7 +134,7 @@ function getInstanceScore(existingPoses, keypoints) { } export function decode(offsets, scores, displacementsFwd, displacementsBwd, maxDetected, minConfidence) { - const poses: Array<{ keypoints, box: Box, score: number }> = []; + const poses: { keypoints, box: Box, score: number }[] = []; const queue = buildPartWithScoreQueue(minConfidence, scores); // Generate at most maxDetected object instances per image in decreasing root part score order.
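// sketch of the decode loop that follows (comment added for clarity, not in the original source): pop the highest-scoring candidate root keypoint off the max-heap; discard it if it falls within the non-maximum-suppression radius of an already-decoded pose; otherwise follow the forward/backward displacement fields to grow a full skeleton from that root and compute its instance score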
while (poses.length < maxDetected && !queue.empty()) { @@ -163,7 +163,7 @@ export async function predict(input: Tensor, config: Config): Promise<BodyResult[]> { - const results: Array<Tensor> = model.execute(normalized, poseNetOutputs) as Array<Tensor>; + const results: Tensor[] = model.execute(normalized, poseNetOutputs) as Tensor[]; const results3d = results.map((y) => tf.squeeze(y, [0])); results3d[1] = tf.sigmoid(results3d[1]); // apply sigmoid on scores return results3d; @@ -174,7 +174,7 @@ export async function predict(input: Tensor, config: Config): Promise<BodyResult[]> { [a diff file header was lost here in extraction] -export function scalePoses(poses, [height, width], [inputResolutionHeight, inputResolutionWidth]): Array<BodyResult> { +export function scalePoses(poses, [height, width], [inputResolutionHeight, inputResolutionWidth]): BodyResult[] { const scaleY = height / inputResolutionHeight; const scaleX = width / inputResolutionWidth; const scalePose = (pose, i): BodyResult => ({ @@ -90,7 +90,7 @@ export function scalePoses(poses, [height, width], [inputResolutionHeight, input // algorithm based on Coursera Lecture from Algorithms, Part 1: https://www.coursera.org/learn/algorithms-part1/lecture/ZjoSM/heapsort export class MaxHeap { - priorityQueue: Array<unknown>; // don't touch + priorityQueue: unknown[]; // don't touch numberOfElements: number; getElementValue: unknown; // function call diff --git a/src/config.ts b/src/config.ts index ea9c3238..3098f152 100644 --- a/src/config.ts +++ b/src/config.ts @@ -1,4 +1,3 @@ -/* eslint-disable indent */ /* eslint-disable no-multi-spaces */ /** Generic config type inherited by all module types */ diff --git a/src/draw/body.ts b/src/draw/body.ts index 3c1fec15..bbeb547d 100644 --- a/src/draw/body.ts +++ b/src/draw/body.ts @@ -5,7 +5,7 @@ import type { BodyResult } from '../result'; import type { AnyCanvas, DrawOptions } from '../exports'; /** draw detected bodies */ -export async function body(inCanvas: AnyCanvas, result: Array<BodyResult>, drawOptions?: Partial<DrawOptions>) { +export async function body(inCanvas: AnyCanvas, result: BodyResult[], drawOptions?: Partial<DrawOptions>) { const localOptions = mergeDeep(options, drawOptions); if (!result || !inCanvas) return; const ctx = getCanvasContext(inCanvas); @@ -16,7 +16,7 @@ export async function body(inCanvas: AnyCanvas, result: Array<BodyResult>, drawO ctx.fillStyle = localOptions.color; ctx.lineWidth = localOptions.lineWidth; ctx.font = localOptions.font; - if (localOptions.drawBoxes && result[i].box && result[i].box?.length === 4) { + if (localOptions.drawBoxes && result[i].box && result[i].box.length === 4) { rect(ctx, result[i].box[0], result[i].box[1], result[i].box[2], result[i].box[3], localOptions); if (localOptions.drawLabels) { if (localOptions.shadowColor && localOptions.shadowColor !== '') { diff --git a/src/draw/draw.ts b/src/draw/draw.ts index 5b519999..da8fb738 100644 --- a/src/draw/draw.ts +++ b/src/draw/draw.ts @@ -24,7 +24,7 @@ export { object } from './object'; export { gesture } from './gesture'; /** draw combined person results instead of individual detection result objects */ -export async function person(inCanvas: AnyCanvas, result: Array<PersonResult>, drawOptions?: Partial<DrawOptions>) { +export async function person(inCanvas: AnyCanvas, result: PersonResult[], drawOptions?: Partial<DrawOptions>) { const localOptions = mergeDeep(options, drawOptions); if (!result || !inCanvas) return; const ctx = getCanvasContext(inCanvas); diff --git a/src/draw/face.ts b/src/draw/face.ts index a7d554a4..24b771ee 100644 --- a/src/draw/face.ts +++ b/src/draw/face.ts @@ -44,24 +44,24 @@ function drawLabels(f: FaceResult, ctx: CanvasRenderingContext2D | OffscreenCanv function drawIrisElipse(f: FaceResult, ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D) { // iris: array[center, left,
top, right, bottom] - if (f.annotations && f.annotations['leftEyeIris'] && f.annotations['leftEyeIris'][0]) { + if (f.annotations && f.annotations.leftEyeIris && f.annotations.leftEyeIris[0]) { ctx.strokeStyle = opt.useDepth ? 'rgba(255, 200, 255, 0.3)' : opt.color; ctx.beginPath(); - const sizeX = Math.abs(f.annotations['leftEyeIris'][3][0] - f.annotations['leftEyeIris'][1][0]) / 2; - const sizeY = Math.abs(f.annotations['leftEyeIris'][4][1] - f.annotations['leftEyeIris'][2][1]) / 2; - ctx.ellipse(f.annotations['leftEyeIris'][0][0], f.annotations['leftEyeIris'][0][1], sizeX, sizeY, 0, 0, 2 * Math.PI); + const sizeX = Math.abs(f.annotations.leftEyeIris[3][0] - f.annotations.leftEyeIris[1][0]) / 2; + const sizeY = Math.abs(f.annotations.leftEyeIris[4][1] - f.annotations.leftEyeIris[2][1]) / 2; + ctx.ellipse(f.annotations.leftEyeIris[0][0], f.annotations.leftEyeIris[0][1], sizeX, sizeY, 0, 0, 2 * Math.PI); ctx.stroke(); if (opt.fillPolygons) { ctx.fillStyle = opt.useDepth ? 'rgba(255, 255, 200, 0.3)' : opt.color; ctx.fill(); } } - if (f.annotations && f.annotations['rightEyeIris'] && f.annotations['rightEyeIris'][0]) { + if (f.annotations && f.annotations.rightEyeIris && f.annotations.rightEyeIris[0]) { ctx.strokeStyle = opt.useDepth ? 'rgba(255, 200, 255, 0.3)' : opt.color; ctx.beginPath(); - const sizeX = Math.abs(f.annotations['rightEyeIris'][3][0] - f.annotations['rightEyeIris'][1][0]) / 2; - const sizeY = Math.abs(f.annotations['rightEyeIris'][4][1] - f.annotations['rightEyeIris'][2][1]) / 2; - ctx.ellipse(f.annotations['rightEyeIris'][0][0], f.annotations['rightEyeIris'][0][1], sizeX, sizeY, 0, 0, 2 * Math.PI); + const sizeX = Math.abs(f.annotations.rightEyeIris[3][0] - f.annotations.rightEyeIris[1][0]) / 2; + const sizeY = Math.abs(f.annotations.rightEyeIris[4][1] - f.annotations.rightEyeIris[2][1]) / 2; + ctx.ellipse(f.annotations.rightEyeIris[0][0], f.annotations.rightEyeIris[0][1], sizeX, sizeY, 0, 0, 2 * Math.PI); ctx.stroke(); if (opt.fillPolygons) { ctx.fillStyle = opt.useDepth ? 
'rgba(255, 255, 200, 0.3)' : opt.color; @@ -95,19 +95,19 @@ function drawGazeSpheres(f: FaceResult, ctx: CanvasRenderingContext2D | Offscree } function drawGazeArrows(f: FaceResult, ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D) { - if (opt.drawGaze && f.rotation?.gaze?.strength && f.rotation?.gaze?.bearing && f.annotations['leftEyeIris'] && f.annotations['rightEyeIris'] && f.annotations['leftEyeIris'][0] && f.annotations['rightEyeIris'][0]) { + if (opt.drawGaze && f.rotation?.gaze.strength && f.rotation.gaze.bearing && f.annotations.leftEyeIris && f.annotations.rightEyeIris && f.annotations.leftEyeIris[0] && f.annotations.rightEyeIris[0]) { ctx.strokeStyle = 'pink'; ctx.fillStyle = 'pink'; const leftGaze = [ - f.annotations['leftEyeIris'][0][0] + (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]), - f.annotations['leftEyeIris'][0][1] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]), + f.annotations.leftEyeIris[0][0] + (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]), + f.annotations.leftEyeIris[0][1] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]), ]; - arrow(ctx, [f.annotations['leftEyeIris'][0][0], f.annotations['leftEyeIris'][0][1]], [leftGaze[0], leftGaze[1]], 4); + arrow(ctx, [f.annotations.leftEyeIris[0][0], f.annotations.leftEyeIris[0][1]], [leftGaze[0], leftGaze[1]], 4); const rightGaze = [ - f.annotations['rightEyeIris'][0][0] + (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]), - f.annotations['rightEyeIris'][0][1] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]), + f.annotations.rightEyeIris[0][0] + (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]), + f.annotations.rightEyeIris[0][1] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]), ]; - arrow(ctx, [f.annotations['rightEyeIris'][0][0], f.annotations['rightEyeIris'][0][1]], [rightGaze[0], rightGaze[1]], 4); + arrow(ctx, [f.annotations.rightEyeIris[0][0], f.annotations.rightEyeIris[0][1]], [rightGaze[0], rightGaze[1]], 4); } } @@ -149,7 +149,7 @@ function drawFaceBoxes(f: FaceResult, ctx) { } /** draw detected faces */ -export async function face(inCanvas: AnyCanvas, result: Array<FaceResult>, drawOptions?: Partial<DrawOptions>) { +export async function face(inCanvas: AnyCanvas, result: FaceResult[], drawOptions?: Partial<DrawOptions>) { opt = mergeDeep(options, drawOptions); if (!result || !inCanvas) return; const ctx = getCanvasContext(inCanvas); diff --git a/src/draw/gesture.ts b/src/draw/gesture.ts index a7dc7682..61d47fd4 100644 --- a/src/draw/gesture.ts +++ b/src/draw/gesture.ts @@ -5,7 +5,7 @@ import type { GestureResult } from '../result'; import type { AnyCanvas, DrawOptions } from '../exports'; /** draw detected gestures */ -export async function gesture(inCanvas: AnyCanvas, result: Array<GestureResult>, drawOptions?: Partial<DrawOptions>) { +export async function gesture(inCanvas: AnyCanvas, result: GestureResult[], drawOptions?: Partial<DrawOptions>) { const localOptions = mergeDeep(options, drawOptions); if (!result || !inCanvas) return; if (localOptions.drawGestures) { diff --git a/src/draw/hand.ts b/src/draw/hand.ts index 253fbfda..bffe5cc8 100644 --- a/src/draw/hand.ts +++ b/src/draw/hand.ts @@ -5,7 +5,7 @@ import type { HandResult } from '../result'; import type { AnyCanvas, DrawOptions, Point } from '../exports'; /** draw detected hands */ -export async function hand(inCanvas: AnyCanvas, result: Array<HandResult>, drawOptions?: Partial<DrawOptions>) { +export async function hand(inCanvas:
   const localOptions = mergeDeep(options, drawOptions);
   if (!result || !inCanvas) return;
   const ctx = getCanvasContext(inCanvas);
@@ -36,22 +36,22 @@ export async function hand(inCanvas: AnyCanvas, result: Array<HandResult>, drawO
       }
     }
     if (localOptions.drawLabels && h.annotations) {
-      const addHandLabel = (part: Array<Point>, title: string) => {
+      const addHandLabel = (part: Point[], title: string) => {
         if (!part || part.length === 0 || !part[0]) return;
         const z = part[part.length - 1][2] || -256;
         ctx.fillStyle = colorDepth(z, localOptions);
         ctx.fillText(title, part[part.length - 1][0] + 4, part[part.length - 1][1] + 4);
       };
       ctx.font = localOptions.font;
-      addHandLabel(h.annotations['index'], 'index');
-      addHandLabel(h.annotations['middle'], 'middle');
-      addHandLabel(h.annotations['ring'], 'ring');
-      addHandLabel(h.annotations['pinky'], 'pinky');
-      addHandLabel(h.annotations['thumb'], 'thumb');
-      addHandLabel(h.annotations['palm'], 'palm');
+      addHandLabel(h.annotations.index, 'index');
+      addHandLabel(h.annotations.middle, 'middle');
+      addHandLabel(h.annotations.ring, 'ring');
+      addHandLabel(h.annotations.pinky, 'pinky');
+      addHandLabel(h.annotations.thumb, 'thumb');
+      addHandLabel(h.annotations.palm, 'palm');
     }
     if (localOptions.drawPolygons && h.annotations) {
-      const addHandLine = (part: Array<Point>) => {
+      const addHandLine = (part: Point[]) => {
         if (!part || part.length === 0 || !part[0]) return;
         for (let i = 0; i < part.length; i++) {
           ctx.beginPath();
@@ -63,11 +63,11 @@ export async function hand(inCanvas: AnyCanvas, result: Array<HandResult>, drawO
         }
       };
       ctx.lineWidth = localOptions.lineWidth;
-      addHandLine(h.annotations['index']);
-      addHandLine(h.annotations['middle']);
-      addHandLine(h.annotations['ring']);
-      addHandLine(h.annotations['pinky']);
-      addHandLine(h.annotations['thumb']);
+      addHandLine(h.annotations.index);
+      addHandLine(h.annotations.middle);
+      addHandLine(h.annotations.ring);
+      addHandLine(h.annotations.pinky);
+      addHandLine(h.annotations.thumb);
       // addPart(h.annotations.palm);
     }
   }
diff --git a/src/draw/object.ts b/src/draw/object.ts
index 8664e083..65015202 100644
--- a/src/draw/object.ts
+++ b/src/draw/object.ts
@@ -5,7 +5,7 @@ import type { ObjectResult } from '../result';
 import type { AnyCanvas, DrawOptions } from '../exports';

 /** draw detected objects */
-export async function object(inCanvas: AnyCanvas, result: Array<ObjectResult>, drawOptions?: Partial<DrawOptions>) {
+export async function object(inCanvas: AnyCanvas, result: ObjectResult[], drawOptions?: Partial<DrawOptions>) {
   const localOptions = mergeDeep(options, drawOptions);
   if (!result || !inCanvas) return;
   const ctx = getCanvasContext(inCanvas);
diff --git a/src/draw/options.ts b/src/draw/options.ts
index 79cde324..e2bbd8e8 100644
--- a/src/draw/options.ts
+++ b/src/draw/options.ts
@@ -1,7 +1,7 @@
 /** Draw Options
  * - Accessed via `human.draw.options` or provided per each draw method as the drawOptions optional parameter
  */
-export type DrawOptions = {
+export interface DrawOptions {
   /** draw line color */
   color: string,
   /** alpha value used for lines */
diff --git a/src/face/angles.ts b/src/face/angles.ts
index d219264c..4a97e7b6 100644
--- a/src/face/angles.ts
+++ b/src/face/angles.ts
@@ -4,7 +4,7 @@ type Vector = [number, number, number];

 const calculateGaze = (face: FaceResult): { bearing: number, strength: number } => {
   const radians = (pt1: Point, pt2: Point) => Math.atan2(pt1[1] - pt2[1], pt1[0] - pt2[0]); // function to calculate angle between any two points
-  if (!face.annotations['rightEyeIris'] || !face.annotations['leftEyeIris']) return { bearing: 0, strength: 0 };
+  if (!face.annotations.rightEyeIris || !face.annotations.leftEyeIris) return { bearing: 0, strength: 0 };
   const offsetIris = [0, -0.1]; // iris center may not align with average of eye extremes
   const eyeRatio = 1; // factor to normalize changes x vs y
@@ -54,8 +54,7 @@ export const calculateFaceAngle = (face: FaceResult, imageSize: [number, number]
   };
   // 3x3 rotation matrix to Euler angles based on https://www.geometrictools.com/Documentation/EulerAngles.pdf
   const rotationMatrixToEulerAngle = (r: number[]): { pitch: number, yaw: number, roll: number } => {
-    // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
-    const [r00, _r01, _r02, r10, r11, r12, r20, r21, r22] = r;
+    const [r00, _r01, _r02, r10, r11, r12, r20, r21, r22] = r; // eslint-disable-line @typescript-eslint/no-unused-vars
     let thetaX: number;
     let thetaY: number;
     let thetaZ: number;
@@ -74,9 +73,9 @@ export const calculateFaceAngle = (face: FaceResult, imageSize: [number, number]
       thetaY = Math.atan2(r21, r22);
       thetaX = 0;
     }
-    if (isNaN(thetaX)) thetaX = 0;
-    if (isNaN(thetaY)) thetaY = 0;
-    if (isNaN(thetaZ)) thetaZ = 0;
+    if (Number.isNaN(thetaX)) thetaX = 0;
+    if (Number.isNaN(thetaY)) thetaY = 0;
+    if (Number.isNaN(thetaZ)) thetaZ = 0;
     return { pitch: 2 * -thetaX, yaw: 2 * -thetaY, roll: 2 * -thetaZ };
   };
@@ -99,18 +98,18 @@ export const calculateFaceAngle = (face: FaceResult, imageSize: [number, number]
   // top, bottom, left, right
   const pts: Point[] = [mesh[10], mesh[152], mesh[234], mesh[454]].map((pt) => [pt[0] * imageSize[0] / size, pt[1] * imageSize[1] / size, pt[2]] as Point); // make the xyz coordinates proportional, independent of the image/box size

-  const y_axis = normalize(subVectors(pts[1] as Vector, pts[0] as Vector));
-  let x_axis = normalize(subVectors(pts[3] as Vector, pts[2] as Vector));
-  const z_axis = normalize(crossVectors(x_axis, y_axis));
-  // adjust x_axis to make sure that all axes are perpendicular to each other
-  x_axis = crossVectors(y_axis, z_axis);
+  const yAxis = normalize(subVectors(pts[1] as Vector, pts[0] as Vector));
+  let xAxis = normalize(subVectors(pts[3] as Vector, pts[2] as Vector));
+  const zAxis = normalize(crossVectors(xAxis, yAxis));
+  // adjust xAxis to make sure that all axes are perpendicular to each other
+  xAxis = crossVectors(yAxis, zAxis);

   // Rotation Matrix from Axis Vectors - http://renderdan.blogspot.com/2006/05/rotation-matrix-from-axis-vectors.html
   // 3x3 rotation matrix is flatten to array in row-major order. Note that the rotation represented by this matrix is inverted.
   const matrix: [number, number, number, number, number, number, number, number, number] = [
-    x_axis[0], x_axis[1], x_axis[2],
-    y_axis[0], y_axis[1], y_axis[2],
-    z_axis[0], z_axis[1], z_axis[2],
+    xAxis[0], xAxis[1], xAxis[2],
+    yAxis[0], yAxis[1], yAxis[2],
+    zAxis[0], zAxis[1], zAxis[2],
   ];
   const angle = rotationMatrixToEulerAngle(matrix);
   // const angle = meshToEulerAngle(mesh);
diff --git a/src/face/antispoof.ts b/src/face/antispoof.ts
index 029370c4..0fc3cd8f 100644
--- a/src/face/antispoof.ts
+++ b/src/face/antispoof.ts
@@ -10,7 +10,7 @@ import { loadModel } from '../tfjs/load';
 import { env } from '../util/env';

 let model: GraphModel | null;
-const cached: Array<number> = [];
+const cached: number[] = [];
 let skipped = Number.MAX_SAFE_INTEGER;
 let lastCount = 0;
 let lastTime = 0;
diff --git a/src/face/blazeface.ts b/src/face/blazeface.ts
index 00f97d76..2941b039 100644
--- a/src/face/blazeface.ts
+++ b/src/face/blazeface.ts
@@ -20,7 +20,7 @@ let anchors: Tensor | null = null;
 let inputSize = 0;
 let inputSizeT: Tensor | null = null;

-type DetectBox = { startPoint: Point, endPoint: Point, landmarks: Array<Point>, confidence: number };
+interface DetectBox { startPoint: Point, endPoint: Point, landmarks: Point[], confidence: number }

 export const size = () => inputSize;
@@ -77,7 +77,7 @@ export async function getBoxes(inputImage: Tensor, config: Config) {
   t.scores = tf.squeeze(t.sigmoid);
   t.nms = await tf.image.nonMaxSuppressionAsync(t.boxes, t.scores, (config.face.detector?.maxDetected || 0), (config.face.detector?.iouThreshold || 0), (config.face.detector?.minConfidence || 0));
   const nms = await t.nms.array() as number[];
-  const boxes: Array<DetectBox> = [];
+  const boxes: DetectBox[] = [];
   const scores = await t.scores.data();
   for (let i = 0; i < nms.length; i++) {
     const confidence = scores[nms[i]];
diff --git a/src/face/constants.ts b/src/face/constants.ts
index 0b26b394..f724fddf 100644
--- a/src/face/constants.ts
+++ b/src/face/constants.ts
@@ -1,7 +1,7 @@
 // @tensorflow/tfjs-models/face-landmark-detection/src/constants.ts
 // https://github.com/google/mediapipe/mediapipe/python/solutions/face_mesh_connections.py

-type PairArray = Array<[number, number]>;
+type PairArray = [number, number][];

 const LIPS_CONNECTIONS: PairArray = [
   [61, 146], [146, 91], [91, 181], [181, 84], [84, 17], [17, 314], [314, 405], [405, 321], [321, 375], [375, 291], [61, 185], [185, 40], [40, 39], [39, 37], [37, 0], [0, 267], [267, 269], [269, 270], [270, 409], [409, 291],
@@ -187,7 +187,7 @@ export const MEDIAPIPE_FACE_MESH_KEYPOINTS_BY_CONTOUR = {
   faceOval: connectionsToIndices(FACE_OVAL_CONNECTIONS),
 };

-const indexLabelPairs: Array<[number, string]> = Object.entries(MEDIAPIPE_FACE_MESH_KEYPOINTS_BY_CONTOUR)
+const indexLabelPairs: [number, string][] = Object.entries(MEDIAPIPE_FACE_MESH_KEYPOINTS_BY_CONTOUR)
   .map(([label, indices]) => indices.map((index) => [index, label] as [number, string]))
   .flat();
diff --git a/src/face/face.ts b/src/face/face.ts
index eb9a2c49..efa3a5bb 100644
--- a/src/face/face.ts
+++ b/src/face/face.ts
@@ -22,11 +22,10 @@ import type { Tensor } from '../tfjs/types';
 import type { Human } from '../human';
 import { calculateFaceAngle } from './angles';

-type DescRes = { age: number, gender: Gender, genderScore: number, descriptor: number[], race?: { score: number, race: Race }[] };
+interface DescRes { age: number, gender: Gender, genderScore: number, descriptor: number[], race?: { score: number, race: Race }[] }

 export const detectFace = async (instance: Human /* instance of human */, input: Tensor): Promise<FaceResult[]> => {
   // run facemesh, includes blazeface and iris
-  // eslint-disable-next-line no-async-promise-executor
   let timeStamp: number = now();
   let ageRes: { age: number } | Promise<{ age: number }> | null;
   let gearRes: gear.GearType | Promise<gear.GearType> | null;
@@ -38,7 +37,7 @@ export const detectFace = async (instance: Human /* instance of human */, input:
   let livenessRes: number | Promise<number> | null;
   let descRes: DescRes | Promise<DescRes> | null;

-  const faceRes: Array<FaceResult> = [];
+  const faceRes: FaceResult[] = [];
   instance.state = 'run:face';

   const faces = await facemesh.predict(input, instance.config);
@@ -51,7 +50,7 @@ export const detectFace = async (instance: Human /* instance of human */, input:

     // is something went wrong, skip the face
     // @ts-ignore possibly undefied
-    if (!faces[i].tensor || faces[i].tensor['isDisposedInternal']) {
+    if (!faces[i].tensor || faces[i].tensor.isDisposedInternal) {
       log('Face object is disposed:', faces[i].tensor);
       continue;
     }
@@ -60,7 +59,7 @@ export const detectFace = async (instance: Human /* instance of human */, input:
     if (instance.config.face.detector?.mask) {
       const masked = await mask.mask(faces[i]);
       tf.dispose(faces[i].tensor);
-      faces[i].tensor = masked as Tensor;
+      if (masked) faces[i].tensor = masked;
     }

     // calculate face angles
@@ -105,11 +104,11 @@ export const detectFace = async (instance: Human /* instance of human */, input:
     // run gear, inherits face from blazeface
     instance.analyze('Start GEAR:');
     if (instance.config.async) {
-      gearRes = instance.config.face['gear']?.enabled ? gear.predict(faces[i].tensor || tf.tensor([]), instance.config, i, faces.length) : null;
+      gearRes = instance.config.face.gear?.enabled ? gear.predict(faces[i].tensor || tf.tensor([]), instance.config, i, faces.length) : null;
     } else {
       instance.state = 'run:gear';
       timeStamp = now();
-      gearRes = instance.config.face['gear']?.enabled ? await gear.predict(faces[i].tensor || tf.tensor([]), instance.config, i, faces.length) : null;
+      gearRes = instance.config.face.gear?.enabled ? await gear.predict(faces[i].tensor || tf.tensor([]), instance.config, i, faces.length) : null;
       instance.performance.gear = Math.trunc(now() - timeStamp);
     }
     instance.analyze('End GEAR:');
@@ -178,7 +177,7 @@ export const detectFace = async (instance: Human /* instance of human */, input:
         genderScore: (genderRes as { gender: Gender, genderScore: number }).genderScore,
       };
     }
-    if (instance.config.face['gear']?.enabled && gearRes) { // override age/gender/race if gear model is used
+    if (instance.config.face.gear?.enabled && gearRes) { // override age/gender/race if gear model is used
       descRes = {
         ...(descRes as DescRes),
         age: (gearRes as gear.GearType).age,
@@ -218,12 +217,12 @@ export const detectFace = async (instance: Human /* instance of human */, input:
       ...faces[i],
       id: i,
     };
-    if ((descRes as DescRes)?.age) res.age = (descRes as DescRes).age as number;
-    if ((descRes as DescRes)?.gender) res.gender = (descRes as DescRes).gender as Gender;
-    if ((descRes as DescRes)?.genderScore) res.genderScore = (descRes as DescRes)?.genderScore as number;
-    if ((descRes as DescRes)?.descriptor) res.embedding = (descRes as DescRes)?.descriptor as Array<number>;
-    if ((descRes as DescRes)?.race) res.race = (descRes as DescRes)?.race as { score: number, race: Race }[];
-    if (emotionRes) res.emotion = emotionRes as Array<{ score: number, emotion: Emotion }>;
+    if ((descRes as DescRes).age) res.age = (descRes as DescRes).age;
+    if ((descRes as DescRes).gender) res.gender = (descRes as DescRes).gender;
+    if ((descRes as DescRes).genderScore) res.genderScore = (descRes as DescRes).genderScore;
+    if ((descRes as DescRes).descriptor) res.embedding = (descRes as DescRes).descriptor;
+    if ((descRes as DescRes).race) res.race = (descRes as DescRes).race as { score: number, race: Race }[];
+    if (emotionRes) res.emotion = emotionRes as { score: number, emotion: Emotion }[];
     if (antispoofRes) res.real = antispoofRes as number;
     if (livenessRes) res.live = livenessRes as number;
     if (irisSize && irisSize !== 0) res.iris = Math.trunc(500 / irisSize / 11.7) / 100;
diff --git a/src/face/faceboxes.ts b/src/face/faceboxes.ts
index d3377933..bc35dd59 100644
--- a/src/face/faceboxes.ts
+++ b/src/face/faceboxes.ts
@@ -23,7 +23,7 @@ export class FaceBoxes {

   async estimateFaces(input, config) {
     if (config) this.config = config;
-    const results: Array<{ confidence: number, box: Box, boxRaw: Box, image: Tensor }> = [];
+    const results: { confidence: number, box: Box, boxRaw: Box, image: Tensor }[] = [];
     const resizeT = tf.image.resizeBilinear(input, [this.inputSize, this.inputSize]);
     const castT = resizeT.toInt();
     const [scoresT, boxesT, numT] = await this.model.executeAsync(castT) as Tensor[];
@@ -37,7 +37,7 @@ export class FaceBoxes {
     castT.dispose();
     resizeT.dispose();
     for (const i in boxes) {
-      if (scores[i] && scores[i] > (this.config.face?.detector?.minConfidence || 0.1)) {
+      if (scores[i] && scores[i] > (this.config.face.detector?.minConfidence || 0.1)) {
         const crop = [boxes[i][0] / this.enlarge, boxes[i][1] / this.enlarge, boxes[i][2] * this.enlarge, boxes[i][3] * this.enlarge];
         const boxRaw: Box = [crop[1], crop[0], (crop[3]) - (crop[1]), (crop[2]) - (crop[0])];
         const box: Box = [
diff --git a/src/face/facemesh.ts b/src/face/facemesh.ts
index d4fc1230..8abf7206 100644
--- a/src/face/facemesh.ts
+++ b/src/face/facemesh.ts
@@ -21,7 +21,7 @@ import type { GraphModel, Tensor } from '../tfjs/types';
 import type { FaceResult, FaceLandmark, Point } from '../result';
 import type { Config } from '../config';

-type DetectBox = { startPoint: Point, endPoint: Point, landmarks: Array<Point>, confidence: number };
+interface DetectBox { startPoint: Point, endPoint: Point, landmarks: Point[], confidence: number }

 const cache = {
   boxes: [] as DetectBox[],
@@ -43,9 +43,10 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResul
-  const faces: Array<FaceResult> = [];
-  const newCache: Array<DetectBox> = [];
+  const faces: FaceResult[] = [];
+  const newCache: DetectBox[] = [];
   let id = 0;
+  const size = inputSize;
   for (let i = 0; i < cache.boxes.length; i++) {
     const box = cache.boxes[i];
     let angle = 0;
@@ -66,10 +67,10 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResul
-        face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize]);
+        face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / size]);
         for (const key of Object.keys(coords.blazeFaceLandmarks)) {
           face.annotations[key] = [face.mesh[coords.blazeFaceLandmarks[key] as number]]; // add annotations
         }
@@ -91,14 +92,14 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResul
-      const results = model.execute(face.tensor as Tensor) as Array<Tensor>;
+      const results = model.execute(face.tensor as Tensor) as Tensor[];
       const confidenceT = results.find((t) => t.shape[t.shape.length - 1] === 1) as Tensor;
       const faceConfidence = await confidenceT.data();
       face.faceScore = Math.round(100 * faceConfidence[0]) / 100;
       if (face.faceScore < (config.face.detector?.minConfidence || 1)) { // low confidence in detected mesh
         box.confidence = face.faceScore; // reset confidence of cached box
-        if (config.face.mesh?.keepInvalid) {
+        if (config.face.mesh.keepInvalid) {
           face.box = util.clampBox(box, input);
           face.boxRaw = util.getRawBox(box, input);
           face.score = face.boxScore;
@@ -106,7 +107,7 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResul
-          face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 1), pt[1] / (input.shape[1] || 1), (pt[2] || 0) / inputSize]);
+          face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 1), pt[1] / (input.shape[1] || 1), (pt[2] || 0) / size]);
           for (const key of Object.keys(coords.blazeFaceLandmarks)) {
             face.annotations[key] = [face.mesh[coords.blazeFaceLandmarks[key] as number]]; // add annotations
           }
@@ -122,7 +123,7 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResul
-        face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize]);
+        face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / size]);
         for (const key of Object.keys(coords.meshAnnotations)) face.annotations[key] = coords.meshAnnotations[key].map((index) => face.mesh[index]); // add annotations
         face.score = face.faceScore;
         const calculatedBox = { ...util.calculateFaceBox(face.mesh, box), confidence: box.confidence, landmarks: box.landmarks };
@@ -148,13 +149,11 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResul
 export async function load(config: Config): Promise<GraphModel> {
   if (env.initial) model = null;
-  // @ts-ignore private property
-  if (config?.face?.attention?.enabled && model?.signature) {
-    // @ts-ignore private property
-    if (Object.keys(model?.signature?.outputs || {}).length < 6) model = null;
+  if (config.face.attention?.enabled && model?.['signature']) {
+    if (Object.keys(model?.['signature']?.outputs || {}).length < 6) model = null;
   }
   if (!model) {
-    if (config.face.attention?.enabled) model = await loadModel(config.face.attention?.modelPath);
+    if (config.face.attention?.enabled) model = await loadModel(config.face.attention.modelPath);
     else model = await loadModel(config.face.mesh?.modelPath);
   } else if (config.debug) {
     log('cached model:', model['modelUrl']);
diff --git a/src/face/facemeshcoords.ts b/src/face/facemeshcoords.ts
index 5c4e39b2..bbd0d0c1 100644
--- a/src/face/facemeshcoords.ts
+++ b/src/face/facemeshcoords.ts
@@ -53,7 +53,7 @@ export const meshAnnotations: Record<string, number[]> = {
 export const meshLandmarks: Record<string, number | number[]> = {
   count: 468,
   mouth: 13,
-  symmetryLine: [13, meshAnnotations['midwayBetweenEyes'][0]],
+  symmetryLine: [13, meshAnnotations.midwayBetweenEyes[0]],
 };
@@ -66,7 +66,7 @@ export const blazeFaceLandmarks: Record<string, number | number[]> = {
   symmetryLine: [3, 2],
 };

-export const irisIndices: Array<{ key: string, indices: number[] }> = [ // A mapping from facemesh model keypoints to iris model keypoints.
+export const irisIndices: { key: string, indices: number[] }[] = [ // A mapping from facemesh model keypoints to iris model keypoints.
   { key: 'EyeUpper0', indices: [9, 10, 11, 12, 13, 14, 15] }, // 7 x 3d
   { key: 'EyeUpper1', indices: [25, 26, 27, 28, 29, 30, 31] }, // 7 x 3d
   { key: 'EyeUpper2', indices: [41, 42, 43, 44, 45, 46, 47] }, // 7 x 3d
@@ -549,7 +549,7 @@ export const UV468: [number, number][] = [
   [0.723330020904541, 0.363372981548309],
 ];

-export const TRI468: Array<number> = [
+export const TRI468: number[] = [
   127, 34, 139, 11, 0, 37, 232, 231, 120, 72, 37, 39, 128, 121, 47, 232, 121, 128, 104, 69, 67, 175, 171, 148, 157, 154, 155, 118, 50, 101, 73, 39, 40, 9,
   151, 108, 48, 115, 131, 194, 204, 211, 74, 40, 185, 80, 42, 183, 40, 92, 186, 230, 229, 118, 202, 212, 214, 83, 18, 17, 76, 61, 146, 160, 29, 30, 56, 157, 173,
   106, 204, 194, 135, 214, 192, 203, 165, 98, 21, 71, 68, 51, 45, 4, 144, 24, 23, 77, 146, 91, 205, 50, 187, 201, 200, 18, 91, 106, 182, 90, 91,
@@ -635,7 +635,7 @@ export const TRI468: Array<number> = [
   259, 443, 259, 260, 444, 260, 467, 445, 309, 459, 250, 305, 289, 290, 305, 290, 460, 401, 376, 435, 309, 250, 392, 376, 411, 433, 453, 341, 464, 357, 453, 465,
   343, 357, 412, 437, 343, 399, 344, 360, 440, 420, 437, 456, 360, 420, 363, 361, 401, 288, 265, 372, 353, 390, 339, 249, 339, 448, 255];

-export const TRI68: Array<number> = [0, 1, 36, 0, 36, 17, 1, 2, 41, 1, 41, 36, 2, 3, 31, 2, 31, 41, 3, 4, 48, 3, 48, 31, 4, 5, 48, 5, 6, 48, 6, 7, 59, 6, 59, 48, 7, 8, 58, 7, 58, 59,
+export const TRI68: number[] = [0, 1, 36, 0, 36, 17, 1, 2, 41, 1, 41, 36, 2, 3, 31, 2, 31, 41, 3, 4, 48, 3, 48, 31, 4, 5, 48, 5, 6, 48, 6, 7, 59, 6, 59, 48, 7, 8, 58, 7, 58, 59,
   8, 9, 56, 8, 56, 57, 8, 57, 58, 9, 10, 55, 9, 55, 56, 10, 11, 54, 10, 54, 55, 11, 12, 54, 12, 13, 54, 13, 14, 35, 13, 35, 54, 14, 15, 46, 14, 46, 35, 15, 16, 45, 15, 45, 46,
   16, 26, 45, 17, 36, 18, 18, 37, 19, 18, 36, 37, 19, 38, 20, 19, 37, 38, 20, 39, 21, 20, 38, 39, 21, 39, 27, 22, 42, 23, 22, 27, 42, 23, 43, 24, 23, 42, 43, 24, 44, 25, 24, 43, 44,
   25, 45, 26, 25, 44, 45, 27, 39, 28, 27, 28, 42, 28, 39, 29, 28, 29, 42, 29, 31, 30, 29, 30, 35, 29, 40, 31, 29, 35, 47, 29,
@@ -644,7 +644,7 @@ export const TRI68: Array<number> = [0, 1, 36, 0, 36, 17, 1, 2, 41, 1, 41, 36, 2
   48, 59, 60, 49, 61, 50, 49, 60, 61, 50, 62, 51, 50, 61, 62, 51, 62, 52, 52, 63, 53, 52, 62, 63, 53, 64, 54, 53, 63, 64, 54, 64, 55, 55, 65, 56, 55, 64, 65, 56, 66, 57, 56, 65, 66,
   57, 66, 58, 58, 67, 59, 58, 66, 67, 59, 67, 60, 60, 67, 61, 61, 66, 62, 61, 67, 66, 62, 66, 63, 63, 65, 64, 63, 66, 65, 21, 27, 22];

-export const TRI33: Array<number> = [
+export const TRI33: number[] = [
   /* eyes */ 0, 8, 7, 7, 8, 1, 2, 10, 9, 9, 10, 3,
   /* brows */ 17, 0, 18, 18, 0, 7, 18, 7, 19, 19, 7, 1, 19, 1, 11, 19, 11, 20, 21, 3, 22, 21, 9, 3, 20, 9, 21, 20, 2, 9, 20, 11, 2,
   /* 4head */ 23, 17, 18, 25, 21, 22, 24, 19, 20, 24, 18, 19, 24, 20, 21, 24, 23, 18, 24, 21, 25,
@@ -655,9 +655,9 @@ export const TRI33: Array<number> = [
   /* cont */ 26, 30, 5, 27, 6, 31, 0, 28, 26, 3, 27, 29, 17, 28, 0, 3, 29, 22, 23, 28, 17, 22, 29, 25, 28, 30, 26, 27, 31, 29,
 ];

-export const TRI7: Array<number> = [0, 4, 1, 2, 4, 3, 4, 5, 6];
+export const TRI7: number[] = [0, 4, 1, 2, 4, 3, 4, 5, 6];

-export const VTX68: Array<number> = [
+export const VTX68: number[] = [
   /* cont */ 127, 234, 132, 58, 172, 150, 149, 148, 152, 377, 378, 379, 397, 288, 361, 454, 356,
   /* brows */ 70, 63, 105, 66, 107, 336, 296, 334, 293, 300,
   /* nose */ 168, 6, 195, 4, 98, 97, 2, 326, 327,
@@ -666,9 +666,9 @@ export const VTX68: Array<number> = [
   /* mouth */ 78, 81, 13, 311, 308, 402, 14, 178,
 ];

-export const VTX33: Array<number> = [33, 133, 362, 263, 1, 62, 308, 159, 145, 386, 374, 6, 102, 331, 2, 13, 14, 70, 105, 107, 336, 334, 300, 54, 10, 284, 50, 280, 234, 454, 58, 288, 152];
+export const VTX33: number[] = [33, 133, 362, 263, 1, 62, 308, 159, 145, 386, 374, 6, 102, 331, 2, 13, 14, 70, 105, 107, 336, 334, 300, 54, 10, 284, 50, 280, 234, 454, 58, 288, 152];

-export const VTX7: Array<number> = [33, 133, 362, 263, 1, 78, 308];
+export const VTX7: number[] = [33, 133, 362, 263, 1, 78, 308];

 export const UV68 = VTX68.map((x) => UV468[x]);
@@ -679,7 +679,7 @@ export const UV7 = VTX7.map((x) => UV468[x]);

 // https://github.com/tensorflow/tfjs-models/blob/master/face-landmarks-detection/src/constants.ts
 // https://github.com/google/mediapipe/mediapipe/python/solutions/face_mesh_connections.py
-type PairArray = Array<[number, number]>;
+type PairArray = [number, number][];

 function connectionsToIndices(connections: PairArray) {
   const indices = connections.map((connection) => connection[0]);
diff --git a/src/face/facemeshutil.ts b/src/face/facemeshutil.ts
index 83cf2fb4..a743b9cc 100644
--- a/src/face/facemeshutil.ts
+++ b/src/face/facemeshutil.ts
@@ -84,13 +84,13 @@ export const dot = (v1: number[], v2: number[]) => {
 };

 export const getColumnFrom2DArr = (arr, columnIndex) => {
-  const column: Array<number> = [];
+  const column: number[] = [];
   for (let i = 0; i < arr.length; i++) column.push(arr[i][columnIndex]);
   return column;
 };

 export const multiplyTransformMatrices = (mat1, mat2) => {
-  const product: Array<number[]> = [];
+  const product: number[][] = [];
   const size = mat1.length;
   for (let row = 0; row < size; row++) {
     product.push([]);
@@ -124,7 +124,7 @@ export function generateAnchors(inputSize) {
   const spec = inputSize === 192
     ? { strides: [4], anchors: [1] } // facemesh-detector
     : { strides: [inputSize / 16, inputSize / 8], anchors: [2, 6] }; // blazeface
-  const anchors: Array<[number, number]> = [];
+  const anchors: [number, number][] = [];
   for (let i = 0; i < spec.strides.length; i++) {
     const stride = spec.strides[i];
     const gridRows = Math.floor((inputSize + stride - 1) / stride);
diff --git a/src/face/faceres.ts b/src/face/faceres.ts
index ef4a7c86..b511a473 100644
--- a/src/face/faceres.ts
+++ b/src/face/faceres.ts
@@ -16,15 +16,15 @@ import type { Tensor, GraphModel } from '../tfjs/types';
 import type { Config } from '../config';
 import type { Gender, Race } from '../result';

-export type FaceRes = { age: number, gender: Gender, genderScore: number, descriptor: number[], race?: { score: number, race: Race }[] };
+export interface FaceRes { age: number, gender: Gender, genderScore: number, descriptor: number[], race?: { score: number, race: Race }[] }

 let model: GraphModel | null;
-const last: Array<{
+const last: {
   age: number,
   gender: Gender,
   genderScore: number,
   descriptor: number[],
-}> = [];
+}[] = [];
 let lastTime = 0;
 let lastCount = 0;
@@ -88,7 +88,7 @@ export async function predict(image: Tensor, config: Config, idx: number, count:
       const genderT = await resT.find((t) => t.shape[1] === 1) as Tensor;
       const gender = await genderT.data();
       const confidence = Math.trunc(200 * Math.abs((gender[0] - 0.5))) / 100;
-      if (confidence > (config.face.description?.minConfidence || 0)) {
+      if (confidence > (config.face.description.minConfidence || 0)) {
         obj.gender = gender[0] <= 0.5 ? 'female' : 'male';
         obj.genderScore = Math.min(0.99, confidence);
       }
diff --git a/src/face/insightface.ts b/src/face/insightface.ts
index 98fa80fc..251f3701 100644
--- a/src/face/insightface.ts
+++ b/src/face/insightface.ts
@@ -14,7 +14,7 @@ import type { Config } from '../config';
 import { env } from '../util/env';

 let model: GraphModel | null;
-const last: Array<number[]> = [];
+const last: number[][] = [];
 let lastCount = 0;
 let lastTime = 0;
 let skipped = Number.MAX_SAFE_INTEGER;
@@ -35,14 +35,14 @@ export async function predict(input: Tensor, config: Config, idx, count): Promis
     return last[idx];
   }
   return new Promise(async (resolve) => {
-    let data: Array<number> = [];
+    let data: number[] = [];
     if (config.face['insightface']?.enabled && model?.inputs[0].shape) {
       const t: Record<string, Tensor> = {};
       t.crop = tf.image.resizeBilinear(input, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false); // just resize to fit the embedding model
       // do a tight crop of image and resize it to fit the model
       // const box = [[0.05, 0.15, 0.85, 0.85]]; // empyrical values for top, left, bottom, right
       // t.crop = tf.image.cropAndResize(input, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
-      t.data = model?.execute(t.crop) as Tensor;
+      t.data = model.execute(t.crop) as Tensor;
       const output = await t.data.data();
       data = Array.from(output); // convert typed array to simple array
       Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
diff --git a/src/face/iris.ts b/src/face/iris.ts
index b9e41503..7ba42317 100644
--- a/src/face/iris.ts
+++ b/src/face/iris.ts
@@ -13,8 +13,8 @@ let inputSize = 0;

 const irisEnlarge = 2.3;

-const leftOutline = coords.meshAnnotations['leftEyeLower0'];
-const rightOutline = coords.meshAnnotations['rightEyeLower0'];
+const leftOutline = coords.meshAnnotations.leftEyeLower0;
+const rightOutline = coords.meshAnnotations.rightEyeLower0;

 const eyeLandmarks = {
   leftBounds: [leftOutline[0], leftOutline[leftOutline.length - 1]],
@@ -80,7 +80,7 @@ export const getEyeBox = (rawCoords, face, eyeInnerCornerIndex, eyeOuterCornerIn
 // Given a cropped image of an eye, returns the coordinates of the contours surrounding the eye and the iris.
 export const getEyeCoords = (eyeData, eyeBox, eyeBoxSize, flip = false) => {
-  const eyeRawCoords: Array<Point> = [];
+  const eyeRawCoords: Point[] = [];
   for (let i = 0; i < irisLandmarks.numCoordinates; i++) {
     const x = eyeData[i * 3];
     const y = eyeData[i * 3 + 1];
diff --git a/src/face/liveness.ts b/src/face/liveness.ts
index c2d1d9ce..1ae3424e 100644
--- a/src/face/liveness.ts
+++ b/src/face/liveness.ts
@@ -10,7 +10,7 @@ import * as tf from '../../dist/tfjs.esm.js';
 import { env } from '../util/env';

 let model: GraphModel | null;
-const cached: Array<number> = [];
+const cached: number[] = [];
 let skipped = Number.MAX_SAFE_INTEGER;
 let lastCount = 0;
 let lastTime = 0;
diff --git a/src/face/mask.ts b/src/face/mask.ts
index 0fb0e80b..1d6e723d 100644
--- a/src/face/mask.ts
+++ b/src/face/mask.ts
@@ -7,7 +7,7 @@ const expandFact = 0.1;
 const alpha = 0.5;

 // point inclusion in polygon based on https://wrf.ecse.rpi.edu/Research/Short_Notes/pnpoly.html
-function insidePoly(x: number, y: number, polygon: Array<{ x: number, y: number }>): boolean {
+function insidePoly(x: number, y: number, polygon: { x: number, y: number }[]): boolean {
   let inside = false;
   let j = polygon.length - 1;
   for (let i = 0; i < polygon.length; j = i++) {
@@ -22,7 +22,7 @@ export async function mask(face: FaceResult): Promise<Tensor | undefined> {
   const width = face.tensor.shape[2] || 0;
   const height = face.tensor.shape[1] || 0;
   const buffer = await face.tensor.buffer();
-  let silhouette: Array<{ x: number, y: number }> = [];
+  let silhouette: { x: number, y: number }[] = [];
   for (const pt of meshAnnotations.silhouette) silhouette.push({ x: (face.mesh[pt][0] - face.box[0]) / face.box[2], y: (face.mesh[pt][1] - face.box[1]) / face.box[3] }); // add all silhouette points scaled to local box
   if (expandFact && expandFact > 0) silhouette = silhouette.map((pt) => ({ x: pt.x > 0.5 ? pt.x + expandFact : pt.x - expandFact, y: pt.y > 0.5 ? pt.y + expandFact : pt.y - expandFact })); // expand silhouette
   for (let x = 0; x < width; x++) {
diff --git a/src/face/match.ts b/src/face/match.ts
index a773ffb7..1141145e 100644
--- a/src/face/match.ts
+++ b/src/face/match.ts
@@ -1,5 +1,5 @@
 /** Face descriptor type as number array */
-export type Descriptor = Array<number>
+export type Descriptor = number[]
 export type MatchOptions = { order?: number, threshold?: number, multiplier?: number, min?: number, max?: number } | undefined;

 /** Calculates distance between two descriptors
@@ -54,7 +54,7 @@ export function similarity(descriptor1: Descriptor, descriptor2: Descriptor, opt
 * - `distance` calculated `distance` of given descriptor to the best match
 * - `similarity` calculated normalized `similarity` of given descriptor to the best match
 */
-export function match(descriptor: Descriptor, descriptors: Array<Descriptor>, options: MatchOptions = { order: 2, multiplier: 25, threshold: 0, min: 0.2, max: 0.8 }) {
+export function match(descriptor: Descriptor, descriptors: Descriptor[], options: MatchOptions = { order: 2, multiplier: 25, threshold: 0, min: 0.2, max: 0.8 }) {
   if (!Array.isArray(descriptor) || !Array.isArray(descriptors) || descriptor.length < 64 || descriptors.length === 0) { // validate input
     return { index: -1, distance: Number.POSITIVE_INFINITY, similarity: 0 };
   }
diff --git a/src/face/mobilefacenet.ts b/src/face/mobilefacenet.ts
index 94186a8e..faa4271f 100644
--- a/src/face/mobilefacenet.ts
+++ b/src/face/mobilefacenet.ts
@@ -14,14 +14,14 @@ import type { Config } from '../config';
 import { env } from '../util/env';

 let model: GraphModel | null;
-const last: Array<number[]> = [];
+const last: number[][] = [];
 let lastCount = 0;
 let lastTime = 0;
 let skipped = Number.MAX_SAFE_INTEGER;

 export async function load(config: Config): Promise<GraphModel> {
   if (env.initial) model = null;
-  if (!model) model = await loadModel(config.face['mobilefacenet'].modelPath);
+  if (!model) model = await loadModel(config.face['mobilefacenet']?.modelPath);
   else if (config.debug) log('cached model:', model['modelUrl']);
   return model;
 }
@@ -53,14 +53,14 @@ export async function predict(input: Tensor, config: Config, idx, count): Promis
     return last[idx];
   }
   return new Promise(async (resolve) => {
-    let data: Array<number> = [];
+    let data: number[] = [];
     if (config.face['mobilefacenet']?.enabled && model?.inputs[0].shape) {
       const t: Record<string, Tensor> = {};
       t.crop = tf.image.resizeBilinear(input, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false); // just resize to fit the embedding model
       // do a tight crop of image and resize it to fit the model
       // const box = [[0.05, 0.15, 0.85, 0.85]]; // empyrical values for top, left, bottom, right
       // t.crop = tf.image.cropAndResize(input, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
-      t.data = model?.execute(t.crop) as Tensor;
+      t.data = model.execute(t.crop) as Tensor;
       /*
       // optional normalize outputs with l2 normalization
       const scaled = tf.tidy(() => {
diff --git a/src/gear/emotion.ts b/src/gear/emotion.ts
index 77d51c1b..7fb8b152 100644
--- a/src/gear/emotion.ts
+++ b/src/gear/emotion.ts
@@ -15,7 +15,7 @@ import { constants } from '../tfjs/constants';
 const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral'];

 let model: GraphModel | null;
-const last: Array<Array<{ score: number, emotion: Emotion }>> = [];
+const last: { score: number, emotion: Emotion }[][] = [];
 let lastCount = 0;
 let lastTime = 0;
 let skipped = Number.MAX_SAFE_INTEGER;
@@ -27,7 +27,7 @@ export async function load(config: Config): Promise<GraphModel> {
   return model;
 }

-export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<Array<{ score: number, emotion: Emotion }>> {
+export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<{ score: number, emotion: Emotion }[]> {
   if (!model) return [];
   const skipFrame = skipped < (config.face.emotion?.skipFrames || 0);
   const skipTime = (config.face.emotion?.skipTime || 0) > (now() - lastTime);
@@ -37,7 +37,7 @@ export async function predict(image: Tensor, config: Config, idx: number, count:
   }
   skipped = 0;
   return new Promise(async (resolve) => {
-    const obj: Array<{ score: number, emotion: Emotion }> = [];
+    const obj: { score: number, emotion: Emotion }[] = [];
     if (config.face.emotion?.enabled) {
       const t: Record<string, Tensor> = {};
       const inputSize = model?.inputs[0].shape ? model.inputs[0].shape[2] : 0;
@@ -58,7 +58,7 @@ export async function predict(image: Tensor, config: Config, idx: number, count:
       lastTime = now();
       const data = await t.emotion.data();
       for (let i = 0; i < data.length; i++) {
-        if (data[i] > (config.face.emotion?.minConfidence || 0)) obj.push({ score: Math.min(0.99, Math.trunc(100 * data[i]) / 100), emotion: annotations[i] as Emotion });
+        if (data[i] > (config.face.emotion.minConfidence || 0)) obj.push({ score: Math.min(0.99, Math.trunc(100 * data[i]) / 100), emotion: annotations[i] as Emotion });
       }
       obj.sort((a, b) => b.score - a.score);
       Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
diff --git a/src/gear/gear.ts b/src/gear/gear.ts
index 86262b33..9cacbba3 100644
--- a/src/gear/gear.ts
+++ b/src/gear/gear.ts
@@ -12,28 +12,26 @@ import type { Config } from '../config';
 import type { GraphModel, Tensor } from '../tfjs/types';
 import { env } from '../util/env';

-export type GearType = { age: number, gender: Gender, genderScore: number, race: Array<{ score: number, race: Race }> }
+export interface GearType { age: number, gender: Gender, genderScore: number, race: { score: number, race: Race }[] }

 let model: GraphModel | null;
-const last: Array<GearType> = [];
+const last: GearType[] = [];
 const raceNames = ['white', 'black', 'asian', 'indian', 'other'];
 const ageWeights = [15, 23, 28, 35.5, 45.5, 55.5, 65];
 let lastCount = 0;
 let lastTime = 0;
 let skipped = Number.MAX_SAFE_INTEGER;

-// eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function load(config: Config) {
   if (env.initial) model = null;
-  if (!model) model = await loadModel(config.face['gear']?.modelPath);
+  if (!model) model = await loadModel(config.face.gear?.modelPath);
   else if (config.debug) log('cached model:', model['modelUrl']);
   return model;
 }

-// eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<GearType> {
   if (!model) return { age: 0, gender: 'unknown', genderScore: 0, race: [] };
-  const skipFrame = skipped < (config.face['gear']?.skipFrames || 0);
-  const skipTime = (config.face['gear']?.skipTime || 0) > (now() - lastTime);
+  const skipFrame = skipped < (config.face.gear?.skipFrames || 0);
+  const skipTime = (config.face.gear?.skipTime || 0) > (now() - lastTime);
   if (config.skipAllowed && skipTime && skipFrame && (lastCount === count) && last[idx]) {
     skipped++;
     return last[idx];
@@ -46,13 +44,13 @@ export async function predict(image: Tensor, config: Config, idx: number, count:
     const box = [[0.0, 0.10, 0.90, 0.90]]; // empyrical values for top, left, bottom, right
     t.resize = tf.image.cropAndResize(image, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
     const obj: GearType = { age: 0, gender: 'unknown', genderScore: 0, race: [] };
-    if (config.face['gear']?.enabled) [t.age, t.gender, t.race] = model.execute(t.resize, ['age_output', 'gender_output', 'race_output']) as Tensor[];
+    if (config.face.gear?.enabled) [t.age, t.gender, t.race] = model.execute(t.resize, ['age_output', 'gender_output', 'race_output']) as Tensor[];
     const gender = await t.gender.data();
     obj.gender = gender[0] > gender[1] ? 'male' : 'female';
     obj.genderScore = Math.round(100 * (gender[0] > gender[1] ? gender[0] : gender[1])) / 100;
     const race = await t.race.data();
     for (let i = 0; i < race.length; i++) {
-      if (race[i] > (config.face['gear']?.minConfidence || 0.2)) obj.race.push({ score: Math.round(100 * race[i]) / 100, race: raceNames[i] as Race });
+      if (race[i] > (config.face.gear?.minConfidence || 0.2)) obj.race.push({ score: Math.round(100 * race[i]) / 100, race: raceNames[i] as Race });
     }
     obj.race.sort((a, b) => b.score - a.score);
     // {0: 'Below20', 1: '21-25', 2: '26-30', 3: '31-40',4: '41-50', 5: '51-60', 6: 'Above60'}
diff --git a/src/gear/ssrnet-age.ts b/src/gear/ssrnet-age.ts
index 904640b0..95621838 100644
--- a/src/gear/ssrnet-age.ts
+++ b/src/gear/ssrnet-age.ts
@@ -13,12 +13,11 @@ import type { Config } from '../config';
 import type { GraphModel, Tensor } from '../tfjs/types';

 let model: GraphModel | null;
-const last: Array<{ age: number }> = [];
+const last: { age: number }[] = [];
 let lastCount = 0;
 let lastTime = 0;
 let skipped = Number.MAX_SAFE_INTEGER;

-// eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function load(config: Config) {
   if (env.initial) model = null;
   if (!model) model = await loadModel(config.face['ssrnet'].modelPathAge);
@@ -26,7 +25,6 @@ export async function load(config: Config) {
   return model;
 }

-// eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<{ age: number }> {
   if (!model) return { age: 0 };
   const skipFrame = skipped < (config.face['ssrnet']?.skipFrames || 0);
@@ -42,7 +40,7 @@ export async function predict(image: Tensor, config: Config, idx: number, count:
     t.resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
     t.enhance = tf.mul(t.resize, constants.tf255);
     const obj = { age: 0 };
-    if (config.face['ssrnet'].enabled) t.age = model.execute(t.enhance) as Tensor;
+    if (config.face['ssrnet']?.enabled) t.age = model.execute(t.enhance) as Tensor;
     if (t.age) {
       const data = await t.age.data();
       obj.age = Math.trunc(10 * data[0]) / 10;
diff --git a/src/gear/ssrnet-gender.ts b/src/gear/ssrnet-gender.ts
index f5f9da1a..f91c6fb0 100644
--- a/src/gear/ssrnet-gender.ts
+++ b/src/gear/ssrnet-gender.ts
@@ -14,7 +14,7 @@ import type { GraphModel, Tensor } from '../tfjs/types';
 import { env } from '../util/env';

 let model: GraphModel | null;
-const last: Array<{ gender: Gender, genderScore: number }> = [];
+const last: { gender: Gender, genderScore: number }[] = [];
 let lastCount = 0;
 let lastTime = 0;
 let skipped = Number.MAX_SAFE_INTEGER;
@@ -22,15 +22,13 @@ let skipped = Number.MAX_SAFE_INTEGER;
 // tuning values
 const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale

-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-export async function load(config: Config | any) {
+export async function load(config: Config) {
   if (env.initial) model = null;
-  if (!model) model = await loadModel(config.face['ssrnet'].modelPathGender);
+  if (!model) model = await loadModel(config.face['ssrnet']?.modelPathGender);
   else if (config.debug) log('cached model:', model['modelUrl']);
   return model;
 }

-// eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function predict(image: Tensor, config: Config, idx, count): Promise<{ gender: Gender, genderScore: number }> {
   if (!model) return { gender: 'unknown', genderScore: 0 };
   const skipFrame = skipped < (config.face['ssrnet']?.skipFrames || 0);
@@ -54,7 +52,7 @@ export async function predict(image: Tensor, config: Config, idx, count): Promis
       return normalize;
     });
     const obj: { gender: Gender, genderScore: number } = { gender: 'unknown', genderScore: 0 };
-    if (config.face['ssrnet'].enabled) t.gender = model.execute(t.enhance) as Tensor;
+    if (config.face['ssrnet']?.enabled) t.gender = model.execute(t.enhance) as Tensor;
     const data = await t.gender.data();
     obj.gender = data[0] > data[1] ? 'female' : 'male'; // returns two values 0..1, bigger one is prediction
     obj.genderScore = data[0] > data[1] ? (Math.trunc(100 * data[0]) / 100) : (Math.trunc(100 * data[1]) / 100);
diff --git a/src/gesture/gesture.ts b/src/gesture/gesture.ts
index 0a940a7c..8d201ee5 100644
--- a/src/gesture/gesture.ts
+++ b/src/gesture/gesture.ts
@@ -33,7 +33,7 @@ export type HandGesture =

 export const body = (res: BodyResult[]): GestureResult[] => {
   if (!res) return [];
-  const gestures: Array<{ body: number, gesture: BodyGesture }> = [];
+  const gestures: { body: number, gesture: BodyGesture }[] = [];
   for (let i = 0; i < res.length; i++) {
     // raising hands
     const leftWrist = res[i].keypoints.find((a) => (a.part === 'leftWrist'));
@@ -55,7 +55,7 @@ export const body = (res: BodyResult[]): GestureResult[] => {

 export const face = (res: FaceResult[]): GestureResult[] => {
   if (!res) return [];
-  const gestures: Array<{ face: number, gesture: FaceGesture }> = [];
+  const gestures: { face: number, gesture: FaceGesture }[] = [];
   for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 450) {
       const zDiff = (res[i].mesh[33][2] || 0) - (res[i].mesh[263][2] || 0);
@@ -77,7 +77,7 @@ export const face = (res: FaceResult[]): GestureResult[] => {

 export const iris = (res: FaceResult[]): GestureResult[] => {
   if (!res) return [];
-  const gestures: Array<{ iris: number, gesture: IrisGesture }> = [];
+  const gestures: { iris: number, gesture: IrisGesture }[] = [];
   for (let i = 0; i < res.length; i++) {
     if (!res[i].annotations || !res[i].annotations.leftEyeIris || !res[i].annotations.leftEyeIris[0] || !res[i].annotations.rightEyeIris || !res[i].annotations.rightEyeIris[0]) continue;
     const sizeXLeft = res[i].annotations.leftEyeIris[3][0] - res[i].annotations.leftEyeIris[1][0];
@@ -118,11 +118,11 @@ export const iris = (res: FaceResult[]): GestureResult[] => {

 export const hand = (res: HandResult[]): GestureResult[] => {
   if (!res) return [];
-  const gestures: Array<{ hand: number, gesture: HandGesture }> = [];
+  const gestures: { hand: number, gesture: HandGesture }[] = [];
   for (let i = 0; i < res.length; i++) {
-    const fingers: Array<{ name: string, position: Point }> = [];
-    if (res[i]['annotations']) {
-      for (const [finger, pos] of Object.entries(res[i]['annotations'])) {
+    const fingers: { name: string, position: Point }[] = [];
+    if (res[i].annotations) {
+      for (const [finger, pos] of Object.entries(res[i].annotations)) {
         if (finger !== 'palmBase' && Array.isArray(pos) && pos[0]) fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
       }
     }
@@ -132,8 +132,8 @@ export const hand = (res: HandResult[]): GestureResult[] => {
       const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a));
       gestures.push({ hand: i, gesture: `${highest.name} up` as HandGesture });
     }
-    if (res[i]['keypoints']) {
-      const poses = fingerPose.match(res[i]['keypoints']);
+    if (res[i].keypoints) {
+      const poses = fingerPose.match(res[i].keypoints);
       for (const pose of poses) gestures.push({ hand: i, gesture: pose.name as HandGesture });
     }
   }
diff --git a/src/hand/fingergesture.ts b/src/hand/fingergesture.ts
index 126810f2..98397c84 100644
--- a/src/hand/fingergesture.ts
+++ b/src/hand/fingergesture.ts
@@ -5,11 +5,8 @@
 import { Finger, FingerCurl, FingerDirection, FingerGesture } from './fingerdef';

-// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
 export const { thumb, index, middle, ring, pinky } = Finger;
-// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
 export const { none, half, full } = FingerCurl;
-// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
 export const { verticalUp, verticalDown, horizontalLeft, horizontalRight, diagonalUpRight, diagonalUpLeft, diagonalDownRight, diagonalDownLeft } = FingerDirection;

 // describe thumbs up gesture 👍
diff --git a/src/hand/fingerpose.ts b/src/hand/fingerpose.ts
index e7513234..916622e6 100644
--- a/src/hand/fingerpose.ts
+++ b/src/hand/fingerpose.ts
@@ -4,6 +4,8 @@
  * Based on: [**FingerPose***](https://github.com/andypotato/fingerpose)
  */

+/* eslint-disable camelcase */
+
 import { Finger, FingerCurl, FingerDirection } from './fingerdef';
 import Gestures from '../hand/fingergesture';
@@ -174,17 +176,17 @@ function calculateFingerDirection(startPoint, midPoint, endPoint, fingerSlopes)

 function estimate(landmarks) {
   // step 1: calculate slopes
-  const slopesXY: Array<number[]> = [];
-  const slopesYZ: Array<number[]> = [];
-  const fingerCurls: Array<number> = [];
-  const fingerDirections: Array<number> = [];
+  const slopesXY: number[][] = [];
+  const slopesYZ: number[][] = [];
+  const fingerCurls: number[] = [];
+  const fingerDirections: number[] = [];
   if (!landmarks) return { curls: fingerCurls, directions: fingerDirections };

   // step 1: calculate slopes
   for (const finger of Finger.all) {
     const points = Finger.getPoints(finger);
-    const slopeAtXY: Array<number> = [];
-    const slopeAtYZ: Array<number> = [];
+    const slopeAtXY: number[] = [];
+    const slopeAtYZ: number[] = [];
     for (const point of points) {
       const point1 = landmarks[point[0]];
       const point2 = landmarks[point[1]];
@@ -230,7 +232,7 @@ export function analyze(keypoints) { // get estimations of curl / direction for
 }

 export function match(keypoints) { // compare gesture description to each known gesture
-  const poses: Array<{ name: string, confidence: number }> = [];
+  const poses: { name: string, confidence: number }[] = [];
   if (!keypoints || keypoints.length === 0) return poses;
   const estimatorRes = estimate(keypoints);
   for (const gesture of Gestures) {
diff --git a/src/hand/handpose.ts b/src/hand/handpose.ts
index aa24a09f..40f294b3 100644
--- a/src/hand/handpose.ts
+++ b/src/hand/handpose.ts
@@ -30,7 +30,7 @@ let handPipeline: handpipeline.HandPipeline;
 export async function predict(input: Tensor, config: Config): Promise<HandResult[]> {
   const predictions = await handPipeline.estimateHands(input, config);
   if (!predictions) return [];
-  const hands: Array<HandResult> = [];
+  const hands: HandResult[] = [];
   for (let i = 0; i < predictions.length; i++) {
     const annotations = {};
     if (predictions[i].landmarks) {
@@ -38,7 +38,7 @@ export async function predict(input: Tensor, config: Config): Promise<HandResult
         annotations[key] = meshAnnotations[key].map((index) => predictions[i].landmarks[index]);
       }
     }
-    const keypoints = predictions[i].landmarks as unknown as Array<Point>;
+    const keypoints = predictions[i].landmarks as unknown as Point[];
     let box: Box = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0]; // maximums so conditionals work
     let boxRaw: Box = [0, 0, 0, 0];
     if (keypoints && keypoints.length > 0) { // if we have landmarks, calculate box based on landmarks
diff --git a/src/hand/handposedetector.ts b/src/hand/handposedetector.ts
index 81a11597..c1507fae 100644
--- a/src/hand/handposedetector.ts
+++ b/src/hand/handposedetector.ts
@@ -68,8 +68,8 @@ export class HandDetector {
     t.norm = this.normalizeBoxes(t.boxes);
     // box detection is flaky so we look for 3x boxes than we need results
     t.nms = await tf.image.nonMaxSuppressionAsync(t.norm, t.scores, 3 * config.hand.maxDetected, config.hand.iouThreshold, config.hand.minConfidence);
-    const nms = await t.nms.array() as Array<number>;
-    const hands: Array<{ startPoint: Point; endPoint: Point; palmLandmarks: Point[]; confidence: number }> = [];
+    const nms = await t.nms.array() as number[];
+    const hands: { startPoint: Point; endPoint: Point; palmLandmarks: Point[]; confidence: number }[] = [];
     for (const index of nms) {
       const p: Record<string, Tensor> = {};
       p.box = tf.slice(t.norm, [index, 0], [1, -1]);
diff --git a/src/hand/handposepipeline.ts b/src/hand/handposepipeline.ts
index cfa0030d..704fb8bb 100644
--- a/src/hand/handposepipeline.ts
+++ b/src/hand/handposepipeline.ts
@@ -23,7 +23,7 @@ export class HandPipeline {
   handDetector: detector.HandDetector;
   handPoseModel: GraphModel;
   inputSize: number;
-  storedBoxes: Array<{ startPoint: Point; endPoint: Point; palmLandmarks: Point[]; confidence: number } | null>;
+  storedBoxes: ({ startPoint: Point; endPoint: Point; palmLandmarks: Point[]; confidence: number } | null)[];
   skipped: number;
   detectedHands: number;
@@ -36,8 +36,7 @@ export class HandPipeline {
     this.detectedHands = 0;
   }

-  // eslint-disable-next-line class-methods-use-this
-  calculateLandmarksBoundingBox(landmarks) {
+  calculateLandmarksBoundingBox(landmarks) { // eslint-disable-line class-methods-use-this
     const xs = landmarks.map((d) => d[0]);
     const ys = landmarks.map((d) => d[1]);
     const startPoint = [Math.min(...xs), Math.min(...ys)];
@@ -107,7 +106,7 @@ export class HandPipeline {
       // for (const possible of boxes) this.storedBoxes.push(possible);
       if (this.storedBoxes.length > 0) useFreshBox = true;
     }
-    const hands: Array<{ landmarks: Point[], confidence: number, boxConfidence: number, fingerConfidence: number, box: { topLeft: Point, bottomRight: Point } }> = [];
+    const hands: { landmarks: Point[], confidence: number, boxConfidence: number, fingerConfidence: number, box: { topLeft: Point, bottomRight: Point } }[] = [];

     // go through working set of boxes
     for (let i = 0; i < this.storedBoxes.length; i++) {
@@ -124,7 +123,7 @@ export class HandPipeline {
       const handImage = tf.div(croppedInput, constants.tf255);
       tf.dispose(croppedInput);
       tf.dispose(rotatedImage);
-      const [confidenceT, keypoints] = this.handPoseModel.execute(handImage) as Array<Tensor>;
+      const [confidenceT, keypoints] = this.handPoseModel.execute(handImage) as Tensor[];
       lastTime = now();
       tf.dispose(handImage);
       const confidence = (await confidenceT.data())[0];
diff --git a/src/hand/handposeutil.ts b/src/hand/handposeutil.ts
index f8f18890..d0c0bfd5 100644
--- a/src/hand/handposeutil.ts
+++ b/src/hand/handposeutil.ts
@@ -87,7 +87,7 @@ export function dot(v1, v2) {
 }

 export function getColumnFrom2DArr(arr, columnIndex) {
-  const column: Array<number> = [];
+  const column: number[] = [];
   for (let i = 0; i < arr.length; i++) {
     column.push(arr[i][columnIndex]);
   }
@@ -95,7 +95,7 @@ export function getColumnFrom2DArr(arr, columnIndex) {
 }

 export function multiplyTransformMatrices(mat1, mat2) {
-  const product: Array<number[]> = [];
+  const product: number[][] = [];
   const size = mat1.length;
   for (let row = 0; row < size; row++) {
     product.push([]);
diff --git a/src/hand/handtrack.ts b/src/hand/handtrack.ts
index 71f83755..13aef772 100644
--- a/src/hand/handtrack.ts
+++ b/src/hand/handtrack.ts
@@ -34,7 +34,7 @@ let skipped = Number.MAX_SAFE_INTEGER;
 let lastTime = 0;
 let outputSize: [number, number] = [0, 0];

-type HandDetectResult = {
+interface HandDetectResult {
   id: number,
   score: number,
   box: Box,
@@ -43,8 +43,8 @@ type HandDetectResult = {
 }

 const cache: {
-  boxes: Array<HandDetectResult>,
-  hands: Array<HandResult>;
+  boxes: HandDetectResult[],
+  hands: HandResult[];
 } = {
   boxes: [],
   hands: [],
@@ -112,7 +112,7 @@ async function detectHands(input: Tensor, config: Config): Promise<HandDetectRes
-  const classScores: Array<Tensor> = tf.unstack(t.scores, 1); // unstack scores based on classes
+  const classScores: Tensor[] = tf.unstack(t.scores, 1); // unstack scores based on classes
   tf.dispose(classScores[faceIndex]);
   classScores.splice(faceIndex, 1); // remove faces
   t.filtered = tf.stack(classScores, 1); // restack
@@ -182,7 +182,7 @@ async function detectFingers(input: Tensor, h: HandDetectResult, config: Config)
 }

 export async function predict(input: Tensor, config: Config): Promise<HandResult[]> {
-  if (!models[0] || !models[1] || !models[0]?.inputs[0].shape || !models[1]?.inputs[0].shape) return []; // something is wrong with the model
+  if (!models[0] || !models[1] || !models[0].inputs[0].shape || !models[1].inputs[0].shape) return []; // something is wrong with the model
   outputSize = [input.shape[2] || 0, input.shape[1] || 0];
   skipped++; // increment skip frames
   const skipTime = (config.hand.skipTime || 0) > (now() - lastTime);
diff --git a/src/human.ts b/src/human.ts
index 11b53728..dcc16099 100644
--- a/src/human.ts
+++ b/src/human.ts
@@ -130,7 +130,7 @@ export class Human {
       ? 'https://vladmandic.github.io/tfjs/dist/'
       : `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf.version_core}/dist/`;
     */
-    const tfVersion = (tf.version?.tfjs || tf.version_core).replace(/-(.*)/, '');
+    const tfVersion = (tf.version.tfjs || tf.version_core).replace(/-(.*)/, '');
     defaults.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tfVersion}/dist/`;
     defaults.modelBasePath = env.browser ? '../models/' : 'file://models/';
     defaults.backend = env.browser ? 'humangl' : 'tensorflow';
@@ -152,7 +152,7 @@ export class Human {
     this.models = new models.Models();
     // reexport draw methods
     this.draw = {
-      options: draw.options as DrawOptions,
+      options: draw.options,
       canvas: (input: AnyCanvas | HTMLImageElement | HTMLVideoElement, output: AnyCanvas) => draw.canvas(input, output),
       face: (output: AnyCanvas, result: FaceResult[], options?: Partial<DrawOptions>) => draw.face(output, result, options),
       body: (output: AnyCanvas, result: BodyResult[], options?: Partial<DrawOptions>) => draw.body(output, result, options),
@@ -164,7 +164,6 @@ export class Human {
     };
     this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [], error: null };
     // export access to image processing
-    // @ts-ignore eslint-typescript cannot correctly infer type in anonymous function
     this.process = { tensor: null, canvas: null };
     // export raw access to underlying models
     this.faceTriangulation = facemesh.triangulation;
@@ -225,7 +224,7 @@ export class Human {
   public match = match.match;

   /** Utility wrapper for performance.now() */
-  now(): number {
+  now(): number { // eslint-disable-line class-methods-use-this
     return now();
   }
@@ -258,8 +257,7 @@ export class Human {
    * @param input - Tensor as provided in human.result.face[n].tensor
    * @returns Tensor
    */
-  // eslint-disable-next-line class-methods-use-this
-  enhance(input: Tensor): Tensor | null {
+  enhance(input: Tensor): Tensor | null { // eslint-disable-line class-methods-use-this
     return faceres.enhance(input);
   }
@@ -303,7 +301,7 @@ export class Human {
     if (this.env.browser) {
       if (this.config.debug) log('configuration:', this.config);
       if (this.config.debug) log('environment:', this.env);
-      if (this.config.debug) log('tf flags:', this.tf.ENV['flags']);
+      if (this.config.debug) log('tf flags:', this.tf.ENV.flags);
     }
   }
@@ -313,17 +311,17 @@ export class Human {
     const loaded = Object.values(this.models).filter((model) => model).length;
     if (loaded !== count) { // number of loaded models changed
-      await models.validate(this); // validate kernel ops used by model against current backend
+      models.validate(this); // validate kernel ops used by model against current backend
       this.emit('load');
     }
     const current = Math.trunc(now() - timeStamp);
-    if (current > (this.performance.loadModels as number || 0)) this.performance.loadModels = this.env.perfadd ? (this.performance.loadModels || 0) + current : current;
+    if (current > (this.performance.loadModels || 0)) this.performance.loadModels = this.env.perfadd ? (this.performance.loadModels || 0) + current : current;
   }
(this.performance.loadModels || 0) + current : current; } /** emit event */ emit = (event: string) => { - if (this.events && this.events.dispatchEvent) this.events?.dispatchEvent(new Event(event)); + if (this.events && this.events.dispatchEvent) this.events.dispatchEvent(new Event(event)); }; /** Runs interpolation using last known result and returns smoothened result @@ -333,7 +331,7 @@ export class Human { * @returns result - {@link Result} */ next(result: Result = this.result): Result { - return interpolate.calc(result, this.config) as Result; + return interpolate.calc(result, this.config); } /** get model loading/loaded stats */ @@ -357,7 +355,7 @@ export class Human { * - result object will contain total exeuction time information for top-20 kernels * - actual detection object can be accessed via `human.result` */ - async profile(input: Input, userConfig?: Partial): Promise> { + async profile(input: Input, userConfig?: Partial): Promise<{ kernel: string, time: number, perc: number }[]> { const profile = await this.tf.profile(() => this.detect(input, userConfig)); const kernels: Record = {}; let total = 0; @@ -366,7 +364,7 @@ export class Human { else kernels[kernel.name] = kernel.kernelTimeMs; total += kernel.kernelTimeMs; } - const kernelArr: Array<{ kernel: string, time: number, perc: number }> = []; + const kernelArr: { kernel: string, time: number, perc: number }[] = []; Object.entries(kernels).forEach((key) => kernelArr.push({ kernel: key[0], time: key[1] as unknown as number, perc: 0 })); // convert to array for (const kernel of kernelArr) { kernel.perc = Math.round(1000 * kernel.time / total) / 1000; @@ -433,7 +431,7 @@ export class Human { this.config.skipAllowed = await image.skip(this.config, img.tensor); if (!this.performance.totalFrames) this.performance.totalFrames = 0; if (!this.performance.cachedFrames) this.performance.cachedFrames = 0; - (this.performance.totalFrames as number)++; + (this.performance.totalFrames)++; if (this.config.skipAllowed) this.performance.cachedFrames++; this.performance.cacheCheck = this.env.perfadd ? (this.performance.cacheCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp); this.analyze('Check Changed:'); @@ -524,7 +522,7 @@ export class Human { } this.performance.total = this.env.perfadd ? 
(this.performance.total || 0) + Math.trunc(now() - timeStart) : Math.trunc(now() - timeStart); - const shape = this.process?.tensor?.shape || []; + const shape = this.process.tensor?.shape || []; this.result = { face: faceRes as FaceResult[], body: bodyRes as BodyResult[], diff --git a/src/image/image.ts b/src/image/image.ts index f33ea719..7ff470ae 100644 --- a/src/image/image.ts +++ b/src/image/image.ts @@ -81,7 +81,7 @@ export async function process(input: Input, config: Config, getTensor: boolean = if (input instanceof tf.Tensor) { // if input is tensor use as-is without filters but correct shape as needed let tensor: Tensor | null = null; if ((input as Tensor)['isDisposedInternal']) throw new Error('input error: attempted to use tensor but it is disposed'); - if (!(input as Tensor)['shape']) throw new Error('input error: attempted to use tensor without a shape'); + if (!(input as Tensor).shape) throw new Error('input error: attempted to use tensor without a shape'); if ((input as Tensor).shape.length === 3) { // [height, width, 3 || 4] if ((input as Tensor).shape[2] === 3) { // [height, width, 3] so add batch tensor = tf.expandDims(input, 0); @@ -98,140 +98,139 @@ export async function process(input: Input, config: Config, getTensor: boolean = } } // at the end shape must be [1, height, width, 3] - if (tensor == null || tensor.shape.length !== 4 || tensor.shape[0] !== 1 || tensor.shape[3] !== 3) throw new Error(`input error: attempted to use tensor with unrecognized shape: ${input['shape']}`); - if ((tensor as Tensor).dtype === 'int32') { + if (tensor == null || (tensor as Tensor).shape.length !== 4 || (tensor as Tensor).shape[0] !== 1 || (tensor as Tensor).shape[3] !== 3) throw new Error(`input error: attempted to use tensor with unrecognized shape: ${(input as Tensor).shape}`); + if ((tensor).dtype === 'int32') { const cast = tf.cast(tensor, 'float32'); tf.dispose(tensor); tensor = cast; } return { tensor, canvas: (config.filter.return ? 
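
The profile() method above ranks kernels by aggregated execution time. The same aggregation can be sketched in isolation; this assumes tfjs-node and an arbitrary sample workload, and kernelTimeMs being a plain number, which holds on the node/cpu backends:

import * as tf from '@tensorflow/tfjs-node';

// Run a workload under tf.profile and return the top kernels by total time.
async function topKernels(maxKernels = 20) {
  const info = await tf.profile(() => tf.tidy(() => tf.zeros([256, 256]).square().sum()));
  const byName: Record<string, number> = {};
  let total = 0;
  for (const kernel of info.kernels) {
    const ms = kernel.kernelTimeMs as number; // plain number outside async webgl profiling
    byName[kernel.name] = (byName[kernel.name] || 0) + ms; // merge repeated kernels
    total += ms;
  }
  return Object.entries(byName)
    .map(([kernel, time]) => ({ kernel, time, perc: Math.round(1000 * time / total) / 1000 }))
    .sort((a, b) => b.time - a.time)
    .slice(0, maxKernels);
}
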
diff --git a/src/image/image.ts b/src/image/image.ts
index f33ea719..7ff470ae 100644
--- a/src/image/image.ts
+++ b/src/image/image.ts
@@ -81,7 +81,7 @@ export async function process(input: Input, config: Config, getTensor: boolean =
   if (input instanceof tf.Tensor) { // if input is tensor use as-is without filters but correct shape as needed
     let tensor: Tensor | null = null;
     if ((input as Tensor)['isDisposedInternal']) throw new Error('input error: attempted to use tensor but it is disposed');
-    if (!(input as Tensor)['shape']) throw new Error('input error: attempted to use tensor without a shape');
+    if (!(input as Tensor).shape) throw new Error('input error: attempted to use tensor without a shape');
     if ((input as Tensor).shape.length === 3) { // [height, width, 3 || 4]
       if ((input as Tensor).shape[2] === 3) { // [height, width, 3] so add batch
         tensor = tf.expandDims(input, 0);
@@ -98,140 +98,139 @@ export async function process(input: Input, config: Config, getTensor: boolean =
       }
     }
     // at the end shape must be [1, height, width, 3]
-    if (tensor == null || tensor.shape.length !== 4 || tensor.shape[0] !== 1 || tensor.shape[3] !== 3) throw new Error(`input error: attempted to use tensor with unrecognized shape: ${input['shape']}`);
-    if ((tensor as Tensor).dtype === 'int32') {
+    if (tensor == null || (tensor as Tensor).shape.length !== 4 || (tensor as Tensor).shape[0] !== 1 || (tensor as Tensor).shape[3] !== 3) throw new Error(`input error: attempted to use tensor with unrecognized shape: ${(input as Tensor).shape}`);
+    if ((tensor).dtype === 'int32') {
       const cast = tf.cast(tensor, 'float32');
       tf.dispose(tensor);
       tensor = cast;
     }
     return { tensor, canvas: (config.filter.return ? outCanvas : null) };
-  } else {
-    // check if resizing will be needed
-    if (typeof input['readyState'] !== 'undefined' && input['readyState'] <= 2) {
-      if (config.debug) log('input stream is not ready');
-      return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
-    }
-    const originalWidth = input['naturalWidth'] || input['videoWidth'] || input['width'] || (input['shape'] && (input['shape'][1] > 0));
-    const originalHeight = input['naturalHeight'] || input['videoHeight'] || input['height'] || (input['shape'] && (input['shape'][2] > 0));
-    if (!originalWidth || !originalHeight) {
-      if (config.debug) log('cannot determine input dimensions');
-      return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
-    }
-    let targetWidth = originalWidth;
-    let targetHeight = originalHeight;
-    if (targetWidth > maxSize) {
-      targetWidth = maxSize;
-      targetHeight = Math.trunc(targetWidth * originalHeight / originalWidth);
-    }
-    if (targetHeight > maxSize) {
-      targetHeight = maxSize;
-      targetWidth = Math.trunc(targetHeight * originalWidth / originalHeight);
-    }
-
-    // create our canvas and resize it if needed
-    if ((config.filter.width || 0) > 0) targetWidth = config.filter.width;
-    else if ((config.filter.height || 0) > 0) targetWidth = originalWidth * ((config.filter.height || 0) / originalHeight);
-    if ((config.filter.height || 0) > 0) targetHeight = config.filter.height;
-    else if ((config.filter.width || 0) > 0) targetHeight = originalHeight * ((config.filter.width || 0) / originalWidth);
-    if (!targetWidth || !targetHeight) throw new Error('input error: cannot determine dimension');
-    if (!inCanvas || (inCanvas?.width !== targetWidth) || (inCanvas?.height !== targetHeight)) inCanvas = canvas(targetWidth, targetHeight);
-
-    // draw input to our canvas
-    const inCtx = inCanvas.getContext('2d') as CanvasRenderingContext2D;
-    if ((typeof ImageData !== 'undefined') && (input instanceof ImageData)) {
-      inCtx.putImageData(input, 0, 0);
-    } else {
-      if (config.filter.flip && typeof inCtx.translate !== 'undefined') {
-        inCtx.translate(originalWidth, 0);
-        inCtx.scale(-1, 1);
-        inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
-        inCtx.setTransform(1, 0, 0, 1, 0, 0); // resets transforms to defaults
-      } else {
-        inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
-      }
-    }
-
-    if (!outCanvas || (inCanvas.width !== outCanvas.width) || (inCanvas?.height !== outCanvas?.height)) outCanvas = canvas(inCanvas.width, inCanvas.height); // init output canvas
-
-    // imagefx transforms using gl from input canvas to output canvas
-    if (config.filter.enabled && env.webgl.supported) {
-      if (!fx) fx = env.browser ? new fxImage.GLImageFilter() : null; // && (typeof document !== 'undefined')
-      env.filter = !!fx;
-      if (!fx || !fx.add) {
-        if (config.debug) log('input process error: cannot initialize filters');
-        env.webgl.supported = false;
-        config.filter.enabled = false;
-        copy(inCanvas, outCanvas); // filter failed to initialize
-        // return { tensor: null, canvas: inCanvas };
-      } else {
-        fx.reset();
-        if (config.filter.brightness !== 0) fx.add('brightness', config.filter.brightness);
-        if (config.filter.contrast !== 0) fx.add('contrast', config.filter.contrast);
-        if (config.filter.sharpness !== 0) fx.add('sharpen', config.filter.sharpness);
-        if (config.filter.blur !== 0) fx.add('blur', config.filter.blur);
-        if (config.filter.saturation !== 0) fx.add('saturation', config.filter.saturation);
-        if (config.filter.hue !== 0) fx.add('hue', config.filter.hue);
-        if (config.filter.negative) fx.add('negative');
-        if (config.filter.sepia) fx.add('sepia');
-        if (config.filter.vintage) fx.add('brownie');
-        if (config.filter.sepia) fx.add('sepia');
-        if (config.filter.kodachrome) fx.add('kodachrome');
-        if (config.filter.technicolor) fx.add('technicolor');
-        if (config.filter.polaroid) fx.add('polaroid');
-        if (config.filter.pixelate !== 0) fx.add('pixelate', config.filter.pixelate);
-        if (fx.get() > 0) outCanvas = fx.apply(inCanvas);
-        else outCanvas = fx.draw(inCanvas);
-      }
-    } else {
-      copy(inCanvas, outCanvas); // if no filters applied, output canvas is input canvas
-      if (fx) fx = null;
-      env.filter = !!fx;
-    }
-
-    if (!getTensor) return { tensor: null, canvas: outCanvas }; // just canvas was requested
-    if (!outCanvas) throw new Error('canvas error: cannot create output');
-
-    // create tensor from image unless input was a tensor already
-    let pixels;
-    let depth = 3;
-    if ((typeof ImageData !== 'undefined' && input instanceof ImageData) || (input['data'] && input['width'] && input['height'])) { // if input is imagedata, just use it
-      if (env.browser && tf.browser) {
-        pixels = tf.browser ? tf.browser.fromPixels(input) : null;
-      } else {
-        depth = input['data'].length / input['height'] / input['width'];
-        // const arr = Uint8Array.from(input['data']);
-        const arr = new Uint8Array(input['data']['buffer']);
-        pixels = tf.tensor(arr, [input['height'], input['width'], depth], 'int32');
-      }
-    } else {
-      if (!tmpCanvas || (outCanvas.width !== tmpCanvas.width) || (outCanvas.height !== tmpCanvas.height)) tmpCanvas = canvas(outCanvas.width, outCanvas.height); // init output canvas
-      if (tf.browser && env.browser) {
-        if (config.backend === 'webgl' || config.backend === 'humangl' || config.backend === 'webgpu') {
-          pixels = tf.browser.fromPixels(outCanvas); // safe to reuse since both backend and context are gl based
-        } else {
-          tmpCanvas = copy(outCanvas); // cannot use output canvas as it already has gl context so we do a silly one more canvas
-          pixels = tf.browser.fromPixels(tmpCanvas);
-        }
-      } else {
-        const tempCanvas = copy(outCanvas); // cannot use output canvas as it already has gl context so we do a silly one more canvas
-        const tempCtx = tempCanvas.getContext('2d') as CanvasRenderingContext2D;
-        const tempData = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
-        depth = tempData.data.length / targetWidth / targetHeight;
-        const arr = new Uint8Array(tempData.data.buffer);
-        pixels = tf.tensor(arr, [targetWidth, targetHeight, depth]);
-      }
-    }
-    if (depth === 4) { // rgba to rgb
-      const rgb = tf.slice3d(pixels, [0, 0, 0], [-1, -1, 3]); // strip alpha channel
-      tf.dispose(pixels);
-      pixels = rgb;
-    }
-    if (!pixels) throw new Error('input error: cannot create tensor');
-    const casted = tf.cast(pixels, 'float32');
-    const tensor = config.filter.equalization ? await enhance.histogramEqualization(casted) : tf.expandDims(casted, 0);
-    tf.dispose([pixels, casted]);
-    return { tensor, canvas: (config.filter.return ? outCanvas : null) };
-  }
+  // check if resizing will be needed
+  if (typeof input['readyState'] !== 'undefined' && (input as HTMLMediaElement).readyState <= 2) {
+    if (config.debug) log('input stream is not ready');
+    return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
+  }
+  const originalWidth = input['naturalWidth'] || input['videoWidth'] || input['width'] || (input['shape'] && (input['shape'][1] > 0));
+  const originalHeight = input['naturalHeight'] || input['videoHeight'] || input['height'] || (input['shape'] && (input['shape'][2] > 0));
+  if (!originalWidth || !originalHeight) {
+    if (config.debug) log('cannot determine input dimensions');
+    return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
+  }
+  let targetWidth = originalWidth;
+  let targetHeight = originalHeight;
+  if (targetWidth > maxSize) {
+    targetWidth = maxSize;
+    targetHeight = Math.trunc(targetWidth * originalHeight / originalWidth);
+  }
+  if (targetHeight > maxSize) {
+    targetHeight = maxSize;
+    targetWidth = Math.trunc(targetHeight * originalWidth / originalHeight);
+  }
+
+  // create our canvas and resize it if needed
+  if ((config.filter.width || 0) > 0) targetWidth = config.filter.width;
+  else if ((config.filter.height || 0) > 0) targetWidth = originalWidth * ((config.filter.height || 0) / originalHeight);
+  if ((config.filter.height || 0) > 0) targetHeight = config.filter.height;
+  else if ((config.filter.width || 0) > 0) targetHeight = originalHeight * ((config.filter.width || 0) / originalWidth);
+  if (!targetWidth || !targetHeight) throw new Error('input error: cannot determine dimension');
+  if (!inCanvas || (inCanvas.width !== targetWidth) || (inCanvas.height !== targetHeight)) inCanvas = canvas(targetWidth, targetHeight);
+
+  // draw input to our canvas
+  const inCtx = inCanvas.getContext('2d') as CanvasRenderingContext2D;
+  if ((typeof ImageData !== 'undefined') && (input instanceof ImageData)) {
+    inCtx.putImageData(input, 0, 0);
+  } else {
+    if (config.filter.flip && typeof inCtx.translate !== 'undefined') {
+      inCtx.translate(originalWidth, 0);
+      inCtx.scale(-1, 1);
+      inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
+      inCtx.setTransform(1, 0, 0, 1, 0, 0); // resets transforms to defaults
+    } else {
+      inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
+    }
+  }
+
+  if (!outCanvas || (inCanvas.width !== outCanvas.width) || (inCanvas.height !== outCanvas.height)) outCanvas = canvas(inCanvas.width, inCanvas.height); // init output canvas
+
+  // imagefx transforms using gl from input canvas to output canvas
+  if (config.filter.enabled && env.webgl.supported) {
+    if (!fx) fx = env.browser ? new fxImage.GLImageFilter() : null; // && (typeof document !== 'undefined')
+    env.filter = !!fx;
+    if (!fx || !fx.add) {
+      if (config.debug) log('input process error: cannot initialize filters');
+      env.webgl.supported = false;
+      config.filter.enabled = false;
+      copy(inCanvas, outCanvas); // filter failed to initialize
+      // return { tensor: null, canvas: inCanvas };
+    } else {
+      fx.reset();
+      if (config.filter.brightness !== 0) fx.add('brightness', config.filter.brightness);
+      if (config.filter.contrast !== 0) fx.add('contrast', config.filter.contrast);
+      if (config.filter.sharpness !== 0) fx.add('sharpen', config.filter.sharpness);
+      if (config.filter.blur !== 0) fx.add('blur', config.filter.blur);
+      if (config.filter.saturation !== 0) fx.add('saturation', config.filter.saturation);
+      if (config.filter.hue !== 0) fx.add('hue', config.filter.hue);
+      if (config.filter.negative) fx.add('negative');
+      if (config.filter.sepia) fx.add('sepia');
+      if (config.filter.vintage) fx.add('brownie');
+      if (config.filter.sepia) fx.add('sepia');
+      if (config.filter.kodachrome) fx.add('kodachrome');
+      if (config.filter.technicolor) fx.add('technicolor');
+      if (config.filter.polaroid) fx.add('polaroid');
+      if (config.filter.pixelate !== 0) fx.add('pixelate', config.filter.pixelate);
+      if (fx.get() > 0) outCanvas = fx.apply(inCanvas);
+      else outCanvas = fx.draw(inCanvas);
+    }
+  } else {
+    copy(inCanvas, outCanvas); // if no filters applied, output canvas is input canvas
+    if (fx) fx = null;
+    env.filter = !!fx;
+  }
+
+  if (!getTensor) return { tensor: null, canvas: outCanvas }; // just canvas was requested
+  if (!outCanvas) throw new Error('canvas error: cannot create output');
+
+  // create tensor from image unless input was a tensor already
+  let pixels;
+  let depth = 3;
+  if ((typeof ImageData !== 'undefined' && input instanceof ImageData) || ((input as ImageData).data && (input as ImageData).width && (input as ImageData).height)) { // if input is imagedata, just use it
+    if (env.browser && tf.browser) {
+      pixels = tf.browser ? tf.browser.fromPixels(input) : null;
+    } else {
+      depth = (input as ImageData).data.length / (input as ImageData).height / (input as ImageData).width;
+      // const arr = Uint8Array.from(input['data']);
+      const arr = new Uint8Array((input as ImageData).data.buffer);
+      pixels = tf.tensor(arr, [(input as ImageData).height, (input as ImageData).width, depth], 'int32');
+    }
+  } else {
+    if (!tmpCanvas || (outCanvas.width !== tmpCanvas.width) || (outCanvas.height !== tmpCanvas.height)) tmpCanvas = canvas(outCanvas.width, outCanvas.height); // init output canvas
+    if (tf.browser && env.browser) {
+      if (config.backend === 'webgl' || config.backend === 'humangl' || config.backend === 'webgpu') {
+        pixels = tf.browser.fromPixels(outCanvas); // safe to reuse since both backend and context are gl based
+      } else {
+        tmpCanvas = copy(outCanvas); // cannot use output canvas as it already has gl context so we do a silly one more canvas
+        pixels = tf.browser.fromPixels(tmpCanvas);
+      }
+    } else {
+      const tempCanvas = copy(outCanvas); // cannot use output canvas as it already has gl context so we do a silly one more canvas
+      const tempCtx = tempCanvas.getContext('2d') as CanvasRenderingContext2D;
+      const tempData = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
+      depth = tempData.data.length / targetWidth / targetHeight;
+      const arr = new Uint8Array(tempData.data.buffer);
+      pixels = tf.tensor(arr, [targetWidth, targetHeight, depth]);
+    }
+  }
+  if (depth === 4) { // rgba to rgb
+    const rgb = tf.slice3d(pixels, [0, 0, 0], [-1, -1, 3]); // strip alpha channel
+    tf.dispose(pixels);
+    pixels = rgb;
+  }
+  if (!pixels) throw new Error('input error: cannot create tensor');
+  const casted = tf.cast(pixels, 'float32');
+  const tensor = config.filter.equalization ? await enhance.histogramEqualization(casted) : tf.expandDims(casted, 0);
+  tf.dispose([pixels, casted]);
+  return { tensor, canvas: (config.filter.return ? outCanvas : null) };
 }

 /*
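
The resize logic in process() above first clamps the input dimensions to an upper bound while preserving aspect ratio. As a standalone sketch (maxSize here is an assumed constant; the module's actual value is defined elsewhere in the file):

// Scale the longest side down to maxSize while keeping the aspect ratio.
function clampDimensions(width: number, height: number, maxSize = 3840): [number, number] {
  let targetWidth = width;
  let targetHeight = height;
  if (targetWidth > maxSize) {
    targetWidth = maxSize;
    targetHeight = Math.trunc(targetWidth * height / width); // preserve aspect ratio
  }
  if (targetHeight > maxSize) {
    targetHeight = maxSize;
    targetWidth = Math.trunc(targetHeight * width / height);
  }
  return [targetWidth, targetHeight];
}
// clampDimensions(7680, 4320) -> [3840, 2160]
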
diff --git a/src/image/imagefx.ts b/src/image/imagefx.ts
index 0db4ba2b..243cd699 100644
--- a/src/image/imagefx.ts
+++ b/src/image/imagefx.ts
@@ -3,6 +3,8 @@
  * Based on: [WebGLImageFilter](https://github.com/phoboslab/WebGLImageFilter)
  */

+/* eslint-disable func-names */
+
 import * as shaders from './imagefxshaders';
 import { canvas } from './image';
 import { log } from '../util/util';
@@ -47,7 +49,7 @@ class GLProgram {
   }

   compile = (source, type): WebGLShader | null => {
-    const shader = this.gl.createShader(type) as WebGLShader;
+    const shader = this.gl.createShader(type);
     if (!shader) {
       log('filter: could not create shader');
       return null;
     }
@@ -107,11 +109,11 @@ export function GLImageFilter() {
   }

   function createFramebufferTexture(width, height) {
-    const fbo = gl.createFramebuffer() as WebGLFramebuffer;
+    const fbo = gl.createFramebuffer();
     gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
     const renderbuffer = gl.createRenderbuffer();
     gl.bindRenderbuffer(gl.RENDERBUFFER, renderbuffer);
-    const texture = gl.createTexture() as WebGLTexture;
+    const texture = gl.createTexture();
     gl.bindTexture(gl.TEXTURE_2D, texture);
     gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
     gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
@@ -154,7 +156,7 @@ export function GLImageFilter() {
     if (shaderProgramCache[fragmentSource]) {
       currentProgram = shaderProgramCache[fragmentSource];
       gl.useProgram((currentProgram ? currentProgram.id : null) || null);
-      return currentProgram as GLProgram;
+      return currentProgram;
     }
     currentProgram = new GLProgram(gl, shaders.vertexIdentity, fragmentSource);
     if (!currentProgram) {
@@ -168,7 +170,7 @@ export function GLImageFilter() {
     gl.enableVertexAttribArray(currentProgram.attribute['uv']);
     gl.vertexAttribPointer(currentProgram.attribute['uv'], 2, gl.FLOAT, false, vertSize, 2 * floatSize);
     shaderProgramCache[fragmentSource] = currentProgram;
-    return currentProgram as GLProgram;
+    return currentProgram;
   }

   const filter = {
@@ -397,8 +399,7 @@ export function GLImageFilter() {
   // @ts-ignore this
   this.add = function (name) {
-    // eslint-disable-next-line prefer-rest-params
-    const args = Array.prototype.slice.call(arguments, 1);
+    const args = Array.prototype.slice.call(arguments, 1); // eslint-disable-line prefer-rest-params
     const func = filter[name];
     filterChain.push({ func, args });
   };
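
GLImageFilter above caches compiled shader programs keyed by fragment source, so repeated filter chains skip recompilation. A minimal standalone sketch of that pattern (names are illustrative, not the module's API):

const programCache: Record<string, WebGLProgram> = {};

// Compile and link a program once per fragment source; reuse it afterwards.
function getProgram(gl: WebGL2RenderingContext, vertexSrc: string, fragmentSrc: string): WebGLProgram | null {
  const cached = programCache[fragmentSrc];
  if (cached) return cached; // cache hit: reuse linked program
  const compile = (src: string, type: number): WebGLShader | null => {
    const shader = gl.createShader(type);
    if (!shader) return null;
    gl.shaderSource(shader, src);
    gl.compileShader(shader);
    return gl.getShaderParameter(shader, gl.COMPILE_STATUS) ? shader : null;
  };
  const vs = compile(vertexSrc, gl.VERTEX_SHADER);
  const fs = compile(fragmentSrc, gl.FRAGMENT_SHADER);
  const program = gl.createProgram();
  if (!vs || !fs || !program) return null;
  gl.attachShader(program, vs);
  gl.attachShader(program, fs);
  gl.linkProgram(program);
  programCache[fragmentSrc] = program; // memoize by fragment source
  return program;
}
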
diff --git a/src/models.ts b/src/models.ts
index 52377574..c1def00f 100644
--- a/src/models.ts
+++ b/src/models.ts
@@ -61,7 +61,7 @@ export class Models {
   antispoof: null | GraphModel | Promise<GraphModel> = null;
 }

-export type ModelStats = {
+export interface ModelStats {
   numLoadedModels: number,
   numEnabledModels: undefined,
   numDefinedModels: number,
@@ -112,12 +112,11 @@ export async function load(instance: Human): Promise<void> {
       [instance.models.handpose, instance.models.handskeleton] = await handpose.load(instance.config);
     }
   }
-  if (instance.config.body.enabled && !instance.models.blazepose && instance.config.body?.modelPath?.includes('blazepose')) instance.models.blazepose = blazepose.loadPose(instance.config);
-  // @ts-ignore optional model
-  if (instance.config.body.enabled && !instance.models.blazeposedetect && instance.config.body['detector'] && instance.config.body['detector']['modelPath']) instance.models.blazeposedetect = blazepose.loadDetect(instance.config);
-  if (instance.config.body.enabled && !instance.models.efficientpose && instance.config.body?.modelPath?.includes('efficientpose')) instance.models.efficientpose = efficientpose.load(instance.config);
-  if (instance.config.body.enabled && !instance.models.movenet && instance.config.body?.modelPath?.includes('movenet')) instance.models.movenet = movenet.load(instance.config);
-  if (instance.config.body.enabled && !instance.models.posenet && instance.config.body?.modelPath?.includes('posenet')) instance.models.posenet = posenet.load(instance.config);
+  if (instance.config.body.enabled && !instance.models.blazepose && instance.config.body.modelPath?.includes('blazepose')) instance.models.blazepose = blazepose.loadPose(instance.config);
+  if (instance.config.body.enabled && !instance.models.blazeposedetect && instance.config.body['detector'] && instance.config.body['detector'].modelPath) instance.models.blazeposedetect = blazepose.loadDetect(instance.config);
+  if (instance.config.body.enabled && !instance.models.efficientpose && instance.config.body.modelPath?.includes('efficientpose')) instance.models.efficientpose = efficientpose.load(instance.config);
+  if (instance.config.body.enabled && !instance.models.movenet && instance.config.body.modelPath?.includes('movenet')) instance.models.movenet = movenet.load(instance.config);
+  if (instance.config.body.enabled && !instance.models.posenet && instance.config.body.modelPath?.includes('posenet')) instance.models.posenet = posenet.load(instance.config);
   if (instance.config.face.enabled && !instance.models.facedetect) instance.models.facedetect = blazeface.load(instance.config);
   if (instance.config.face.enabled && instance.config.face.antispoof?.enabled && !instance.models.antispoof) instance.models.antispoof = antispoof.load(instance.config);
   if (instance.config.face.enabled && instance.config.face.liveness?.enabled && !instance.models.liveness) instance.models.liveness = liveness.load(instance.config);
@@ -125,19 +124,15 @@ export async function load(instance: Human): Promise<void> {
   if (instance.config.face.enabled && instance.config.face.emotion?.enabled && !instance.models.emotion) instance.models.emotion = emotion.load(instance.config);
   if (instance.config.face.enabled && instance.config.face.iris?.enabled && !instance.config.face.attention?.enabled && !instance.models.faceiris) instance.models.faceiris = iris.load(instance.config);
   if (instance.config.face.enabled && instance.config.face.mesh?.enabled && !instance.models.facemesh) instance.models.facemesh = facemesh.load(instance.config);
-  // @ts-ignore optional model
   if (instance.config.face.enabled && instance.config.face['gear']?.enabled && !instance.models.gear) instance.models.gear = gear.load(instance.config);
-  // @ts-ignore optional model
   if (instance.config.face.enabled && instance.config.face['ssrnet']?.enabled && !instance.models.ssrnetage) instance.models.ssrnetage = ssrnetAge.load(instance.config);
-  // @ts-ignore optional model
   if (instance.config.face.enabled && instance.config.face['ssrnet']?.enabled && !instance.models.ssrnetgender) instance.models.ssrnetgender = ssrnetGender.load(instance.config);
-  // @ts-ignore optional model
   if (instance.config.face.enabled && instance.config.face['mobilefacenet']?.enabled && !instance.models.mobilefacenet) instance.models.mobilefacenet = mobilefacenet.load(instance.config);
   if (instance.config.face.enabled && instance.config.face['insightface']?.enabled && !instance.models.insightface) instance.models.insightface = insightface.load(instance.config);
   if (instance.config.hand.enabled && !instance.models.handtrack && instance.config.hand.detector?.modelPath?.includes('handtrack')) instance.models.handtrack = handtrack.loadDetect(instance.config);
   if (instance.config.hand.enabled && instance.config.hand.landmarks && !instance.models.handskeleton && instance.config.hand.detector?.modelPath?.includes('handtrack')) instance.models.handskeleton = handtrack.loadSkeleton(instance.config);
-  if (instance.config.object.enabled && !instance.models.centernet && instance.config.object?.modelPath?.includes('centernet')) instance.models.centernet = centernet.load(instance.config);
-  if (instance.config.object.enabled && !instance.models.nanodet && instance.config.object?.modelPath?.includes('nanodet')) instance.models.nanodet = nanodet.load(instance.config);
+  if (instance.config.object.enabled && !instance.models.centernet && instance.config.object.modelPath?.includes('centernet')) instance.models.centernet = centernet.load(instance.config);
+  if (instance.config.object.enabled && !instance.models.nanodet && instance.config.object.modelPath?.includes('nanodet')) instance.models.nanodet = nanodet.load(instance.config);
   if (instance.config.segmentation.enabled && !instance.models.segmentation) instance.models.segmentation = segmentation.load(instance.config);

   // models are loaded in parallel asynchronously so lets wait until they are actually loaded
@@ -149,7 +144,7 @@ export async function load(instance: Human): Promise<void> {
 }

 let instance: Human;
-export type KernelOps = { name: string, url: string, missing: string[], ops: string[] }
+export interface KernelOps { name: string, url: string, missing: string[], ops: string[] }

 export function validateModel(newInstance: Human | null, model: GraphModel | null, name: string): KernelOps | null {
   if (newInstance) instance = newInstance;
@@ -161,10 +156,8 @@ export function validateModel(newInstance: Human | null, model: GraphModel | nul
   const ops: string[] = [];
   const missing: string[] = [];
   interface Op { name: string, category: string, op: string }
-  // @ts-ignore // modelUrl is a private method
-  const url = model.modelUrl;
-  // @ts-ignore // executor is a private method
-  const executor = model.executor;
+  const url = model['modelUrl'] as string;
+  const executor = model['executor'];
   if (executor && executor.graph.nodes) {
     for (const kernel of Object.values(executor.graph.nodes)) {
       const op = (kernel as Op).op.toLowerCase();
@@ -187,9 +180,9 @@ export function validateModel(newInstance: Human | null, model: GraphModel | nul
   return missing.length > 0 ? { name, missing, ops, url } : null;
 }

-export function validate(newInstance: Human): Array<{ name: string, missing: string[] }> {
+export function validate(newInstance: Human): { name: string, missing: string[] }[] {
   instance = newInstance;
-  const missing: Array<KernelOps> = [];
+  const missing: KernelOps[] = [];
   for (const defined of Object.keys(instance.models)) {
     const model: GraphModel | null = instance.models[defined as keyof Models] as GraphModel | null;
     if (!model) continue;
diff --git a/src/object/centernet.ts b/src/object/centernet.ts
index e7cc0b04..fc6b0941 100644
--- a/src/object/centernet.ts
+++ b/src/object/centernet.ts
@@ -33,7 +33,7 @@ export async function load(config: Config): Promise<GraphModel> {
 async function process(res: Tensor | null, outputShape: [number, number], config: Config) {
   if (!res) return [];
   const t: Record<string, Tensor> = {};
-  const results: Array<ObjectResult> = [];
+  const results: ObjectResult[] = [];
   const detections = await res.array() as number[][][];
   t.squeeze = tf.squeeze(res);
   const arr = tf.split(t.squeeze, 6, 1) as Tensor[]; // x1, y1, x2, y2, score, class
diff --git a/src/object/nanodet.ts b/src/object/nanodet.ts
index 8952ca2d..053ab8aa 100644
--- a/src/object/nanodet.ts
+++ b/src/object/nanodet.ts
@@ -15,7 +15,7 @@ import type { Config } from '../config';
 import { env } from '../util/env';

 let model: GraphModel;
-let last: Array<ObjectResult> = [];
+let last: ObjectResult[] = [];
 let lastTime = 0;
 let skipped = Number.MAX_SAFE_INTEGER;
 let inputSize = 0;
@@ -33,7 +33,8 @@ export async function load(config: Config): Promise<GraphModel> {
 async function process(res: Tensor[], outputShape: [number, number], config: Config) {
   let id = 0;
-  let results: Array<ObjectResult> = [];
+  let results: ObjectResult[] = [];
+  const size = inputSize;
   for (const strideSize of [1, 2, 4]) { // try each stride size as it detects large/medium/small objects
     // find scores, boxes, classes
     const baseSize = strideSize * 13; // 13x13=169, 26x26=676, 52x52=2704
@@ -50,7 +51,7 @@ async function process(res: Tensor[], outputShape: [number, number], config: Con
       if (score > (config.object.minConfidence || 0) && j !== 61) {
         const cx = (0.5 + Math.trunc(i % baseSize)) / baseSize; // center.x normalized to range 0..1
         const cy = (0.5 + Math.trunc(i / baseSize)) / baseSize; // center.y normalized to range 0..1
-        const boxOffset = boxIdx[i].map((a: number) => a * (baseSize / strideSize / inputSize)); // just grab indexes of features with highest scores
+        const boxOffset = boxIdx[i].map((a: number) => a * (baseSize / strideSize / (size))); // just grab indexes of features with highest scores
         const [x, y] = [
           cx - (scaleBox / strideSize * boxOffset[0]),
           cy - (scaleBox / strideSize * boxOffset[1]),
@@ -89,7 +90,7 @@ async function process(res: Tensor[], outputShape: [number, number], config: Con
   // unnecessary boxes and run nms only on good candidates (basically it just does IOU analysis as scores are already filtered)
   const nmsBoxes = results.map((a) => [a.boxRaw[1], a.boxRaw[0], a.boxRaw[3], a.boxRaw[2]]); // switches coordinates from x,y to y,x as expected by tf.nms
   const nmsScores = results.map((a) => a.score);
-  let nmsIdx: Array<number> = [];
+  let nmsIdx: number[] = [];
   if (nmsBoxes && nmsBoxes.length > 0) {
     const nms = await tf.image.nonMaxSuppressionAsync(nmsBoxes, nmsScores, config.object.maxDetected, config.object.iouThreshold, config.object.minConfidence);
     nmsIdx = await nms.data();
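
tf.image.nonMaxSuppressionAsync above expects boxes in [y1, x1, y2, x2] order, which is why the code swaps x/y coordinates first. A standalone sketch of that call (box and score values are hypothetical):

import * as tf from '@tensorflow/tfjs-node';

// Run non-max suppression on [x1, y1, x2, y2] boxes and return surviving indexes.
async function nmsIndexes(boxesXY: [number, number, number, number][], scores: number[], maxDetected = 20, iouThreshold = 0.4, minConfidence = 0.2): Promise<number[]> {
  if (boxesXY.length === 0) return [];
  const boxesYX = boxesXY.map((b) => [b[1], b[0], b[3], b[2]]); // x,y -> y,x as expected by tf.nms
  const nms = await tf.image.nonMaxSuppressionAsync(boxesYX, scores, maxDetected, iouThreshold, minConfidence);
  const idx = Array.from(await nms.data()); // indexes into the original boxes array
  nms.dispose();
  return idx;
}
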
diff --git a/src/result.ts b/src/result.ts
index 4f7f6e75..b0b5c0a6 100644
--- a/src/result.ts
+++ b/src/result.ts
@@ -38,9 +38,9 @@ export interface FaceResult {
   /** detected face box normalized to 0..1 */
   boxRaw: Box,
   /** detected face mesh */
-  mesh: Array<Point>
+  mesh: Point[]
   /** detected face mesh normalized to 0..1 */
-  meshRaw: Array<Point>,
+  meshRaw: Point[],
   /** face contours as array of 2d points normalized to 0..1 */
   // contoursRaw: Array<[number, number]>,
   /** face contours as array of 2d points */
@@ -54,11 +54,11 @@ export interface FaceResult {
   /** gender detection score */
   genderScore?: number,
   /** detected emotions */
-  emotion?: Array<{ score: number, emotion: Emotion }>,
+  emotion?: { score: number, emotion: Emotion }[],
   /** detected race */
-  race?: Array<{ score: number, race: Race }>,
+  race?: { score: number, race: Race }[],
   /** face descriptor */
-  embedding?: Array<number>,
+  embedding?: number[],
   /** face iris distance from camera */
   iris?: number,
   /** face anti-spoofing result confidence */
@@ -111,7 +111,7 @@ export interface BodyResult {
   /** detected body box normalized to 0..1 */
   boxRaw: Box,
   /** detected body keypoints */
-  keypoints: Array<BodyKeypoint>
+  keypoints: BodyKeypoint[]
   /** detected body keypoints combined into annotated parts */
   annotations: Record<string, Point[][]>,
 }
@@ -136,11 +136,11 @@ export interface HandResult {
   /** detected hand box normalized to 0..1 */
   boxRaw: Box,
   /** detected hand keypoints */
-  keypoints: Array<Point>,
+  keypoints: Point[],
   /** detected hand class */
   label: HandType,
   /** detected hand keypoints combined into annotated parts */
-  annotations: Record<string, Array<Point>>,
+  annotations: Record<string, Point[]>,
   /** detected hand parts annotated with part gestures */
   landmarks: Record,
 }
@@ -192,7 +192,7 @@ export interface PersonResult {
   /** left and right hand results that belong to this person */
   hands: { left: HandResult | null, right: HandResult | null },
   /** detected gestures specific to this person */
-  gestures: Array<GestureResult>,
+  gestures: GestureResult[],
   /** box that defines the person */
   box: Box,
   /** box that defines the person normalized to 0..1 */
@@ -206,15 +206,15 @@
  */
 export interface Result {
   /** {@link FaceResult}: detection & analysis results */
-  face: Array<FaceResult>,
+  face: FaceResult[],
   /** {@link BodyResult}: detection & analysis results */
-  body: Array<BodyResult>,
+  body: BodyResult[],
   /** {@link HandResult}: detection & analysis results */
-  hand: Array<HandResult>,
+  hand: HandResult[],
   /** {@link GestureResult}: detection & analysis results */
-  gesture: Array<GestureResult>,
+  gesture: GestureResult[],
   /** {@link ObjectResult}: detection & analysis results */
-  object: Array<ObjectResult>
+  object: ObjectResult[]
   /** global performance object with timing values for each operation */
   performance: Record<string, number>,
   /** optional processed canvas that can be used to draw input on screen */
@@ -222,7 +222,7 @@ export interface Result {
   /** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
   readonly timestamp: number,
   /** getter property that returns unified persons object */
-  persons: Array<PersonResult>,
+  persons: PersonResult[],
   /** Last known error message */
   error: string | null;
 }
diff --git a/src/tfjs/backend.ts b/src/tfjs/backend.ts
index 179427cf..cf9e4b51 100644
--- a/src/tfjs/backend.ts
+++ b/src/tfjs/backend.ts
@@ -52,18 +52,17 @@ export async function check(instance: Human, force = false) {
   // handle webgpu
   if (env.browser && instance.config.backend === 'webgpu') {
-    if (typeof navigator === 'undefined' || typeof navigator['gpu'] === 'undefined') {
+    if (typeof navigator === 'undefined' || typeof navigator.gpu === 'undefined') {
       log('override: backend set to webgpu but browser does not support webgpu');
       instance.config.backend = 'humangl';
     } else {
-      const adapter = await navigator['gpu'].requestAdapter();
+      const adapter = await navigator.gpu.requestAdapter();
       if (instance.config.debug) log('enumerated webgpu adapter:', adapter);
       if (!adapter) {
         log('override: backend set to webgpu but browser reports no available gpu');
         instance.config.backend = 'humangl';
       } else {
         // @ts-ignore requestAdapterInfo is not in tslib
-        // eslint-disable-next-line no-undef
         const adapterInfo = 'requestAdapterInfo' in adapter ? await (adapter as GPUAdapter).requestAdapterInfo() : undefined;
         // if (adapter.features) adapter.features.forEach((feature) => log('webgpu features:', feature));
         log('webgpu adapter info:', adapterInfo);
@@ -86,9 +85,9 @@ export async function check(instance: Human, force = false) {
   // customize wasm
   if (instance.config.backend === 'wasm') {
-    if (tf.env().flagRegistry['CANVAS2D_WILL_READ_FREQUENTLY']) tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
+    if (tf.env().flagRegistry.CANVAS2D_WILL_READ_FREQUENTLY) tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
     if (instance.config.debug) log('wasm path:', instance.config.wasmPath);
-    if (typeof tf?.setWasmPaths !== 'undefined') await tf.setWasmPaths(instance.config.wasmPath, instance.config.wasmPlatformFetch);
+    if (typeof tf.setWasmPaths !== 'undefined') await tf.setWasmPaths(instance.config.wasmPath, instance.config.wasmPlatformFetch);
     else throw new Error('backend error: attempting to use wasm backend but wasm path is not set');
     let mt = false;
     let simd = false;
@@ -114,15 +113,15 @@ export async function check(instance: Human, force = false) {
   // customize humangl
   if (tf.getBackend() === 'humangl') {
-    if (tf.env().flagRegistry['CHECK_COMPUTATION_FOR_ERRORS']) tf.env().set('CHECK_COMPUTATION_FOR_ERRORS', false);
-    if (tf.env().flagRegistry['WEBGL_CPU_FORWARD']) tf.env().set('WEBGL_CPU_FORWARD', true);
-    if (tf.env().flagRegistry['WEBGL_USE_SHAPES_UNIFORMS']) tf.env().set('WEBGL_USE_SHAPES_UNIFORMS', true);
-    if (tf.env().flagRegistry['CPU_HANDOFF_SIZE_THRESHOLD']) tf.env().set('CPU_HANDOFF_SIZE_THRESHOLD', 256);
-    if (tf.env().flagRegistry['WEBGL_EXP_CONV']) tf.env().set('WEBGL_EXP_CONV', true); //
-    if (tf.env().flagRegistry['USE_SETTIMEOUTCUSTOM']) tf.env().set('USE_SETTIMEOUTCUSTOM', true); //
+    if (tf.env().flagRegistry.CHECK_COMPUTATION_FOR_ERRORS) tf.env().set('CHECK_COMPUTATION_FOR_ERRORS', false);
+    if (tf.env().flagRegistry.WEBGL_CPU_FORWARD) tf.env().set('WEBGL_CPU_FORWARD', true);
+    if (tf.env().flagRegistry.WEBGL_USE_SHAPES_UNIFORMS) tf.env().set('WEBGL_USE_SHAPES_UNIFORMS', true);
+    if (tf.env().flagRegistry.CPU_HANDOFF_SIZE_THRESHOLD) tf.env().set('CPU_HANDOFF_SIZE_THRESHOLD', 256);
+    if (tf.env().flagRegistry.WEBGL_EXP_CONV) tf.env().set('WEBGL_EXP_CONV', true); //
+    if (tf.env().flagRegistry.USE_SETTIMEOUTCUSTOM) tf.env().set('USE_SETTIMEOUTCUSTOM', true); //
     // if (tf.env().flagRegistry['WEBGL_PACK_DEPTHWISECONV']) tf.env().set('WEBGL_PACK_DEPTHWISECONV', false);
     // if (if (tf.env().flagRegistry['WEBGL_FORCE_F16_TEXTURES']) && !instance.config.object.enabled) tf.env().set('WEBGL_FORCE_F16_TEXTURES', true); // safe to use 16bit precision
-    if (typeof instance.config['deallocate'] !== 'undefined' && instance.config['deallocate']) { // hidden param
+    if (typeof instance.config.deallocate !== 'undefined' && instance.config.deallocate) { // hidden param
       log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', true);
       tf.env().set('WEBGL_DELETE_TEXTURE_THRESHOLD', 0);
     }
@@ -154,7 +153,7 @@ export async function check(instance: Human, force = false) {
 }

 // register fake missing tfjs ops
-export function fakeOps(kernelNames: Array<string>, config) {
+export function fakeOps(kernelNames: string[], config) {
   // if (config.debug) log('registerKernel:', kernelNames);
   for (const kernelName of kernelNames) {
     const kernelConfig = {
diff --git a/src/tfjs/humangl.ts b/src/tfjs/humangl.ts
index 3757b70e..966cec3a 100644
--- a/src/tfjs/humangl.ts
+++ b/src/tfjs/humangl.ts
@@ -13,7 +13,7 @@ export const config = {
   priority: 999,
   canvas: null,
   gl: null,
-  extensions: [],
+  extensions: [],
   webGLattr: { // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.2
     alpha: false,
     antialias: false,
@@ -33,7 +33,7 @@ function extensions(): void {
   */
   const gl = config.gl;
   if (!gl) return;
-  config.extensions = gl.getSupportedExtensions() as string[];
+  config.extensions = gl.getSupportedExtensions();
   // gl.getExtension('KHR_parallel_shader_compile');
 }
@@ -62,7 +62,11 @@ export async function register(instance: Human): Promise<void> {
     return;
   }
   try {
-    config.gl = config.canvas?.getContext('webgl2', config.webGLattr) as WebGL2RenderingContext;
+    config.gl = config.canvas.getContext('webgl2', config.webGLattr);
+    if (!config.gl) {
+      log('error: cannot get WebGL context');
+      return;
+    }
     const glv2 = config.gl.getParameter(config.gl.VERSION).includes('2.0');
     if (!glv2) {
       log('override: using fallback webgl backend as webgl 2.0 is not detected');
@@ -123,7 +127,7 @@ export async function register(instance: Human): Promise<void> {
     return;
   }
   try {
-    if (tf.env().flagRegistry['WEBGL_VERSION']) tf.env().set('WEBGL_VERSION', 2);
+    if (tf.env().flagRegistry.WEBGL_VERSION) tf.env().set('WEBGL_VERSION', 2);
   } catch (err) {
     log('error: cannot set WebGL backend flags:', err);
     return;
diff --git a/src/tfjs/load.ts b/src/tfjs/load.ts
index 6e225f6b..5c8efbad 100644
--- a/src/tfjs/load.ts
+++ b/src/tfjs/load.ts
@@ -13,7 +13,7 @@ const options = {
   modelBasePath: '',
 };

-export type ModelInfo = {
+export interface ModelInfo {
   name: string,
   inCache: boolean,
   sizeDesired: number,
@@ -67,7 +67,7 @@ export async function loadModel(modelPath: string | undefined): Promise<GraphModel>
 // OffscreenCanvas should be a part of Transferable => extend all postMessage methods
-interface Worker {
+export interface Worker {
   postMessage(message: any, transfer?: Array<Transferable>): void;
 }

-interface ServiceWorker {
+export interface ServiceWorker {
   postMessage(message: any, transfer?: Array<Transferable>): void;
 }

-interface MessagePort {
+export interface MessagePort {
   postMessage(message: any, transfer?: Array<Transferable>): void;
 }

-interface Window {
+export interface Window {
   postMessage(message: any, targetOrigin: string, transfer?: Array<Transferable>): void;
 }
diff --git a/src/tfjs/tfjs.esm.d.ts b/src/tfjs/tfjs.esm.d.ts
index 9ac6591c..9a0eb9c2 100644
--- a/src/tfjs/tfjs.esm.d.ts
+++ b/src/tfjs/tfjs.esm.d.ts
@@ -1,3 +1,5 @@
+/* eslint-disable import/no-extraneous-dependencies */
+
 export declare const version: {
   'tfjs-core': string;
   'tfjs-backend-cpu': string;
@@ -7,11 +9,12 @@ export declare const version: {
   'tfjs-converter': string;
   tfjs: string;
 };
+
 export * from '@tensorflow/tfjs-core';
 export * from '@tensorflow/tfjs-converter';
 export * from '@tensorflow/tfjs-data';
-// export * from "@tensorflow/tfjs-layers";
-// export * from "@tensorflow/tfjs-backend-cpu";
-// export * from "@tensorflow/tfjs-backend-wasm";
-// export * from "@tensorflow/tfjs-backend-webgl";
+export * from '@tensorflow/tfjs-layers';
+export * from '@tensorflow/tfjs-backend-cpu';
+export * from '@tensorflow/tfjs-backend-wasm';
+export * from '@tensorflow/tfjs-backend-webgl';
 export {};
diff --git a/src/tfjs/types.ts b/src/tfjs/types.ts
index 93631e41..7ad33ea6 100644
--- a/src/tfjs/types.ts
+++ b/src/tfjs/types.ts
@@ -4,16 +4,15 @@
  * TensorFlow Tensor type
  * @external
  */
-export type { Tensor, TensorLike, Rank } from '@tensorflow/tfjs-core/dist/index';
+export type { Tensor, TensorLike, Rank } from '@tensorflow/tfjs-core/dist/index'; // eslint-disable-line import/no-extraneous-dependencies

 /**
  * TensorFlow GraphModel type
  * @external
  */
-export type { GraphModel } from '@tensorflow/tfjs-converter/dist/index';
+export type { GraphModel } from '@tensorflow/tfjs-converter/dist/index'; // eslint-disable-line import/no-extraneous-dependencies

 /** Tensorflow Long type
  * @external long
  */
-// eslint-disable-next-line node/no-missing-import
 // export type { Long } from 'long';
diff --git a/src/util/box.ts b/src/util/box.ts
index c0bcdb3c..2898fc65 100644
--- a/src/util/box.ts
+++ b/src/util/box.ts
@@ -1,6 +1,6 @@
 import type { Point, Box } from '../result';

-export function calc(keypoints: Array<Point>, outputSize: [number, number] = [1, 1]) {
+export function calc(keypoints: Point[], outputSize: [number, number] = [1, 1]) {
   const coords = [keypoints.map((pt) => pt[0]), keypoints.map((pt) => pt[1])]; // all x/y coords
   const min = [Math.min(...coords[0]), Math.min(...coords[1])];
   const max = [Math.max(...coords[0]), Math.max(...coords[1])];
@@ -9,7 +9,7 @@ export function calc(keypoints: Array<Point>, outputSize: [number, number] = [1,
   return { box, boxRaw };
 }

-export function square(keypoints: Array<Point>, outputSize: [number, number] = [1, 1]) {
+export function square(keypoints: Point[], outputSize: [number, number] = [1, 1]) {
   const coords = [keypoints.map((pt) => pt[0]), keypoints.map((pt) => pt[1])]; // all x/y coords
   const min = [Math.min(...coords[0]), Math.min(...coords[1])];
   const max = [Math.max(...coords[0]), Math.max(...coords[1])];
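
calc() in box.ts above derives a bounding box from keypoint extents, in pixel space and normalized against outputSize. In isolation (the Point shape mirrors the result types; this is a sketch, not the module's export):

type Pt = [number, number, number?];

// Bounding box from keypoints: [x, y, width, height], plus a 0..1-normalized copy.
function boxFromPoints(keypoints: Pt[], outputSize: [number, number] = [1, 1]) {
  const xs = keypoints.map((pt) => pt[0]);
  const ys = keypoints.map((pt) => pt[1]);
  const min = [Math.min(...xs), Math.min(...ys)];
  const max = [Math.max(...xs), Math.max(...ys)];
  const box = [min[0], min[1], max[0] - min[0], max[1] - min[1]];
  const boxRaw = [box[0] / outputSize[0], box[1] / outputSize[1], box[2] / outputSize[0], box[3] / outputSize[1]];
  return { box, boxRaw };
}
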
diff --git a/src/util/env.ts b/src/util/env.ts
index 2dce4622..af327522 100644
--- a/src/util/env.ts
+++ b/src/util/env.ts
@@ -124,8 +124,8 @@ export class Env {
     // analyze backends
     this.backends = Object.keys(tf.engine().registryFactory);
     this.tensorflow = {
-      version: (tf.backend()['binding'] ? tf.backend()['binding']['TF_Version'] : undefined),
-      gpu: (tf.backend()['binding'] ? tf.backend()['binding'].isUsingGpuDevice() : undefined),
+      version: (tf.backend().binding ? tf.backend().binding.TF_Version : undefined),
+      gpu: (tf.backend().binding ? tf.backend().binding.isUsingGpuDevice() : undefined),
     };
     this.wasm.supported = typeof WebAssembly !== 'undefined';
     this.wasm.backend = this.backends.includes('wasm');
@@ -139,19 +139,19 @@ export class Env {
     this.webgl.supported = typeof ctx !== 'undefined';
     this.webgl.backend = this.backends.includes('webgl');
     if (this.webgl.supported && this.webgl.backend && (tf.getBackend() === 'webgl' || tf.getBackend() === 'humangl')) {
-      // @ts-ignore getGPGPUContext only exists on WebGL backend
       const gl = tf.backend().gpgpu !== 'undefined' ? await tf.backend().getGPGPUContext().gl : null;
       if (gl) {
         this.webgl.version = gl.getParameter(gl.VERSION);
         this.webgl.renderer = gl.getParameter(gl.RENDERER);
       }
     }
-    // @ts-ignore navigator.gpu is only defined when webgpu is available in browser
-    this.webgpu.supported = this.browser && typeof navigator['gpu'] !== 'undefined';
+    this.webgpu.supported = this.browser && typeof navigator.gpu !== 'undefined';
     this.webgpu.backend = this.backends.includes('webgpu');
     try {
-      // @ts-ignore navigator.gpu is only defined when webgpu is available in browser
-      if (this.webgpu.supported) this.webgpu.adapter = (await navigator['gpu'].requestAdapter()).name;
+      if (this.webgpu.supported) {
+        const adapter = await navigator.gpu.requestAdapter();
+        this.webgpu.adapter = adapter ? adapter.name : undefined;
+      }
     } catch {
       this.webgpu.supported = false;
     }
@@ -175,8 +175,8 @@
     } catch { }
     */
     }
-    if (!this['cpu']) Object.defineProperty(this, 'cpu', { value: cpu });
-    else this['cpu'] = cpu;
+    if (!this.cpu) Object.defineProperty(this, 'cpu', { value: cpu });
+    else this.cpu = cpu;
   }
 }
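
The webgpu probe above relies on navigator.gpu existing only in WebGPU-enabled browsers, and on requestAdapter() possibly resolving to null. A standalone sketch (typings assume @webgpu/types; adapter.name mirrors the field read above, though newer specs expose requestAdapterInfo() instead):

// Returns the adapter name when WebGPU is usable, undefined otherwise.
async function detectWebGPU(): Promise<string | undefined> {
  if (typeof navigator === 'undefined' || typeof navigator.gpu === 'undefined') return undefined; // no webgpu api at all
  const adapter = await navigator.gpu.requestAdapter(); // may be null when no usable gpu
  return adapter ? adapter.name : undefined;
}
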
diff --git a/src/util/interpolate.ts b/src/util/interpolate.ts
index a8728723..c2ae8509 100644
--- a/src/util/interpolate.ts
+++ b/src/util/interpolate.ts
@@ -36,7 +36,7 @@ export function calc(newResult: Result, config: Config): Result {
   // interpolate body results
   if (!bufferedResult.body || (newResult.body.length !== bufferedResult.body.length)) {
-    bufferedResult.body = JSON.parse(JSON.stringify(newResult.body as BodyResult[])); // deep clone once
+    bufferedResult.body = JSON.parse(JSON.stringify(newResult.body)) as BodyResult[]; // deep clone once
   } else {
     for (let i = 0; i < newResult.body.length; i++) {
       const box = newResult.body[i].box // update box
@@ -46,7 +46,7 @@ export function calc(newResult: Result, config: Config): Result {
       const keypoints = (newResult.body[i].keypoints // update keypoints
         .map((newKpt, j) => ({
           score: newKpt.score,
-          part: newKpt.part as BodyLandmark,
+          part: newKpt.part,
           position: [
             bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * (bufferedResult.body[i].keypoints[j].position[0] || 0) + (newKpt.position[0] || 0)) / bufferedFactor : newKpt.position[0],
             bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * (bufferedResult.body[i].keypoints[j].position[1] || 0) + (newKpt.position[1] || 0)) / bufferedFactor : newKpt.position[1],
@@ -62,15 +62,15 @@ export function calc(newResult: Result, config: Config): Result {
             bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * (bufferedResult.body[i].keypoints[j].distance?.[1] || 0) + (newKpt.distance?.[1] || 0)) / bufferedFactor : newKpt.distance?.[1],
             bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * (bufferedResult.body[i].keypoints[j].distance?.[2] || 0) + (newKpt.distance?.[2] || 0)) / bufferedFactor : newKpt.distance?.[2],
           ],
-        }))) as Array<{ score: number, part: BodyLandmark, position: [number, number, number?], positionRaw: [number, number, number?] }>;
+        }))) as { score: number, part: BodyLandmark, position: [number, number, number?], positionRaw: [number, number, number?] }[];
       const annotations: Record<string, Point[][]> = {} as Record<string, Point[][]>; // recreate annotations
       let coords = { connected: {} };
-      if (config.body?.modelPath?.includes('efficientpose')) coords = efficientPoseCoords;
-      else if (config.body?.modelPath?.includes('blazepose')) coords = blazePoseCoords;
-      else if (config.body?.modelPath?.includes('movenet')) coords = moveNetCoords;
+      if (config.body.modelPath?.includes('efficientpose')) coords = efficientPoseCoords;
+      else if (config.body.modelPath?.includes('blazepose')) coords = blazePoseCoords;
+      else if (config.body.modelPath?.includes('movenet')) coords = moveNetCoords;
       for (const [name, indexes] of Object.entries(coords.connected as Record<string, string[]>)) {
-        const pt: Array<Point[]> = [];
+        const pt: Point[][] = [];
         for (let j = 0; j < indexes.length - 1; j++) {
           const pt0 = keypoints.find((kp) => kp.part === indexes[j]);
           const pt1 = keypoints.find((kp) => kp.part === indexes[j + 1]);
@@ -79,13 +79,13 @@ export function calc(newResult: Result, config: Config): Result {
         }
         annotations[name] = pt;
       }
-      bufferedResult.body[i] = { ...newResult.body[i], box, boxRaw, keypoints, annotations: annotations as BodyResult['annotations'] }; // shallow clone plus updated values
+      bufferedResult.body[i] = { ...newResult.body[i], box, boxRaw, keypoints, annotations }; // shallow clone plus updated values
     }
   }

   // interpolate hand results
   if (!bufferedResult.hand || (newResult.hand.length !== bufferedResult.hand.length)) {
-    bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand as HandResult[])); // deep clone once
+    bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand)); // deep clone once
   } else {
     for (let i = 0; i < newResult.hand.length; i++) {
       const box = (newResult.hand[i].box// update box
@@ -116,7 +116,7 @@ export function calc(newResult: Result, config: Config): Result {

   // interpolate face results
   if (!bufferedResult.face || (newResult.face.length !== bufferedResult.face.length)) {
-    bufferedResult.face = JSON.parse(JSON.stringify(newResult.face as FaceResult[])); // deep clone once
+    bufferedResult.face = JSON.parse(JSON.stringify(newResult.face)) as FaceResult[]; // deep clone once
   } else {
     for (let i = 0; i < newResult.face.length; i++) {
       const box = (newResult.face[i].box // update box
@@ -131,14 +131,14 @@ export function calc(newResult: Result, config: Config): Result {
       } = { matrix: [0, 0, 0, 0, 0, 0, 0, 0, 0], angle: { roll: 0, yaw: 0, pitch: 0 }, gaze: { bearing: 0, strength: 0 } };
       rotation.matrix = newResult.face[i].rotation?.matrix as [number, number, number, number, number, number, number, number, number];
       rotation.angle = {
-        roll: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle?.roll || 0) + (newResult.face[i].rotation?.angle?.roll || 0)) / bufferedFactor,
-        yaw: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle?.yaw || 0) + (newResult.face[i].rotation?.angle?.yaw || 0)) / bufferedFactor,
-        pitch: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle?.pitch || 0) + (newResult.face[i].rotation?.angle?.pitch || 0)) / bufferedFactor,
+        roll: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle.roll || 0) + (newResult.face[i].rotation?.angle.roll || 0)) / bufferedFactor,
+        yaw: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle.yaw || 0) + (newResult.face[i].rotation?.angle.yaw || 0)) / bufferedFactor,
+        pitch: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle.pitch || 0) + (newResult.face[i].rotation?.angle.pitch || 0)) / bufferedFactor,
       };
       rotation.gaze = {
         // not fully correct due projection on circle, also causes wrap-around draw on jump from negative to positive
-        bearing: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.gaze?.bearing || 0) + (newResult.face[i].rotation?.gaze?.bearing || 0)) / bufferedFactor,
-        strength: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.gaze?.strength || 0) + (newResult.face[i].rotation?.gaze?.strength || 0)) / bufferedFactor,
+        bearing: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.gaze.bearing || 0) + (newResult.face[i].rotation?.gaze.bearing || 0)) / bufferedFactor,
+        strength: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.gaze.strength || 0) + (newResult.face[i].rotation?.gaze.strength || 0)) / bufferedFactor,
       };
       bufferedResult.face[i] = { ...newResult.face[i], rotation, box, boxRaw }; // shallow clone plus updated values
     }
@@ -148,7 +148,7 @@ export function calc(newResult: Result, config: Config): Result {

   // interpolate object detection results
   if (!bufferedResult.object || (newResult.object.length !== bufferedResult.object.length)) {
-    bufferedResult.object = JSON.parse(JSON.stringify(newResult.object as ObjectResult[])); // deep clone once
+    bufferedResult.object = JSON.parse(JSON.stringify(newResult.object)) as ObjectResult[]; // deep clone once
   } else {
     for (let i = 0; i < newResult.object.length; i++) {
       const box = (newResult.object[i].box // update box
@@ -163,7 +163,7 @@ export function calc(newResult: Result, config: Config): Result {
   if (newResult.persons) {
     const newPersons = newResult.persons; // trigger getter function
     if (!bufferedResult.persons || (newPersons.length !== bufferedResult.persons.length)) {
-      bufferedResult.persons = JSON.parse(JSON.stringify(newPersons as PersonResult[]));
+      bufferedResult.persons = JSON.parse(JSON.stringify(newPersons)) as PersonResult[];
     } else {
       for (let i = 0; i < newPersons.length; i++) { // update person box, we don't update the rest as it's updated as reference anyhow
         bufferedResult.persons[i].box = (newPersons[i].box
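
All of the interpolation above reduces to one running exponential average: with buffer factor f, each value moves 1/f of the way toward the newest sample. A worked helper:

// Same formula as the roll/yaw/pitch and position updates above:
// smoothed = ((f - 1) * previous + current) / f
function smooth(previous: number, current: number, factor: number): number {
  return ((factor - 1) * previous + current) / factor;
}
// smooth(100, 120, 4) === 105: a quarter of the way from 100 to 120
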
diff --git a/src/util/persons.ts b/src/util/persons.ts
index 14e4a2ab..76373e66 100644
--- a/src/util/persons.ts
+++ b/src/util/persons.ts
@@ -4,9 +4,9 @@

 import type { FaceResult, BodyResult, HandResult, GestureResult, PersonResult, Box } from '../result';

-export function join(faces: Array<FaceResult>, bodies: Array<BodyResult>, hands: Array<HandResult>, gestures: Array<GestureResult>, shape: Array<number> | undefined): Array<PersonResult> {
+export function join(faces: FaceResult[], bodies: BodyResult[], hands: HandResult[], gestures: GestureResult[], shape: number[] | undefined): PersonResult[] {
   let id = 0;
-  const persons: Array<PersonResult> = [];
+  const persons: PersonResult[] = [];
   for (const face of faces) { // person is defined primarily by face and then we append other objects as found
     const person: PersonResult = { id: id++, face, body: null, hands: { left: null, right: null }, gestures: [], box: [0, 0, 0, 0] };
     for (const body of bodies) {
@@ -34,11 +34,11 @@ export function join(faces: FaceResult[], bodies: BodyResult[], hands:
       }
     }
     for (const gesture of gestures) { // append all gestures according to ids
-      if (gesture['face'] !== undefined && gesture['face'] === face.id) person.gestures?.push(gesture);
-      else if (gesture['iris'] !== undefined && gesture['iris'] === face.id) person.gestures?.push(gesture);
-      else if (gesture['body'] !== undefined && gesture['body'] === person.body?.id) person.gestures?.push(gesture);
-      else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.left?.id) person.gestures?.push(gesture);
-      else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.right?.id) person.gestures?.push(gesture);
+      if (gesture['face'] !== undefined && gesture['face'] === face.id) person.gestures.push(gesture);
+      else if (gesture['iris'] !== undefined && gesture['iris'] === face.id) person.gestures.push(gesture);
+      else if (gesture['body'] !== undefined && gesture['body'] === person.body?.id) person.gestures.push(gesture);
+      else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands.left?.id) person.gestures.push(gesture);
+      else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands.right?.id) person.gestures.push(gesture);
     }

     // create new overarching box from all boxes belonging to person
@@ -50,10 +50,10 @@ export function join(faces: FaceResult[], bodies: BodyResult[], hands:
         y.push(box[1], box[1] + box[3]);
       }
     };
-    extractXY(person.face?.box);
+    extractXY(person.face.box);
     extractXY(person.body?.box);
-    extractXY(person.hands?.left?.box);
-    extractXY(person.hands?.right?.box);
+    extractXY(person.hands.left?.box);
+    extractXY(person.hands.right?.box);
     const minX = Math.min(...x);
     const minY = Math.min(...y);
     person.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY]; // create new overarching box
diff --git a/src/util/profile.ts b/src/util/profile.ts
index e8b426f8..c2685f98 100644
--- a/src/util/profile.ts
+++ b/src/util/profile.ts
@@ -7,20 +7,28 @@ import { log } from './util';

 export const data = {};

-export function run(modelName: string, profileData: Record<string, unknown>): void { // profileData is tfjs internal type
+export type ProfileData = {
+  newBytes: number,
+  peakBytes: number,
+  newTensors: number,
+  kernels: Array<{
+    id: number,
+    kernelTimeMs: number,
+    totalBytesSnapshot: number,
+  }>,
+}
+
+export function run(modelName: string, profileData: ProfileData): void { // profileData is tfjs internal type
   if (!profileData || !profileData.kernels) return;
   const maxDetected = 5;
-  // @ts-ignore profileData.kernels is tfjs internal type
-  const time = (profileData.kernels as Array)
+  const time = (profileData.kernels)
     .filter((a) => a.kernelTimeMs > 0)
     .reduce((a, b) => a += b.kernelTimeMs, 0);
-  // @ts-ignore profileData.kernels is tfjs internal type
-  const slowest = (profileData.kernels as Array)
+  const slowest = (profileData.kernels)
     .map((a, i) => { a.id = i; return a; })
     .filter((a) => a.kernelTimeMs > 0)
     .sort((a, b) => b.kernelTimeMs - a.kernelTimeMs);
-  // @ts-ignore profileData.kernels is tfjs internal type
-  const largest = (profileData.kernels as Array)
+  const largest = (profileData.kernels)
     .map((a, i) => { a.id = i; return a; })
     .filter((a) => a.totalBytesSnapshot > 0)
     .sort((a, b) => b.totalBytesSnapshot - a.totalBytesSnapshot);
@@ -31,7 +39,7 @@ export function run(modelName: string, profileData: ProfileData): void {
     newBytes: profileData.newBytes,
     newTensors: profileData.newTensors,
     peakBytes: profileData.peakBytes,
-    numKernelOps: (profileData['kernels'] as Array).length,
+    numKernelOps: (profileData.kernels).length,
     timeKernelOps: time,
     slowestKernelOps: slowest,
     largestKernelOps: largest,
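
join() above builds the overarching person box by collecting the x/y extents of every member box. As a standalone sketch (boxes are [x, y, width, height], mirroring the extractXY logic above):

// Union of several optional boxes into one enclosing box.
function unionBox(boxes: ([number, number, number, number] | undefined)[]): [number, number, number, number] {
  const x: number[] = [];
  const y: number[] = [];
  for (const box of boxes) {
    if (!box) continue; // skip missing members, as extractXY does above
    x.push(box[0], box[0] + box[2]);
    y.push(box[1], box[1] + box[3]);
  }
  const minX = Math.min(...x);
  const minY = Math.min(...y);
  return [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY];
}
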
'0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`; - // eslint-disable-next-line no-console - if (msg) console.log(ts, 'Human:', ...msg); + if (msg) console.log(ts, 'Human:', ...msg); // eslint-disable-line no-console } // helper function: join two paths @@ -28,7 +27,7 @@ export const now = () => { }; // helper function: checks current config validity -export function validate(defaults: Partial<Config>, config: Partial<Config>, parent = 'config', msgs: Array<{ reason: string, where: string, expected?: string }> = []) { +export function validate(defaults: Partial<Config>, config: Partial<Config>, parent = 'config', msgs: { reason: string, where: string, expected?: string }[] = []) { for (const key of Object.keys(config)) { if (typeof config[key] === 'object') { validate(defaults[key], config[key], key, msgs); @@ -60,7 +59,7 @@ export function mergeDeep(...objects) { } // helper function: return min and max from input array -export const minmax = (data: Array<number>) => data.reduce((acc: Array<number>, val) => { +export const minmax = (data: number[]) => data.reduce((acc: number[], val) => { acc[0] = (acc[0] === undefined || val < acc[0]) ? val : acc[0]; acc[1] = (acc[1] === undefined || val > acc[1]) ? val : acc[1]; return acc; diff --git a/src/warmup.ts index 03fbf9bb..bb51e529 100644 --- a/src/warmup.ts +++ b/src/warmup.ts @@ -10,12 +10,11 @@ import { env } from './util/env'; import type { Config } from './config'; import type { Result } from './result'; import type { Human, Models } from './human'; -import type { Tensor } from './tfjs/types'; async function warmupBitmap(instance: Human): Promise<Result | undefined> { const b64toBlob = (base64: string, type = 'application/octet-stream') => fetch(`data:${type};base64,${base64}`).then((res) => res.blob()); - let blob; - let res; + let blob: Blob | null; + let res: Result | undefined; switch (instance.config.warmup) { case 'face': blob = await b64toBlob(sample.face); break; case 'body': @@ -32,7 +31,7 @@ async function warmupBitmap(instance: Human): Promise<Result | undefined> { async function warmupCanvas(instance: Human): Promise<Result | undefined> { return new Promise((resolve) => { - let src; + let src: string; // let size = 0; switch (instance.config.warmup) { case 'face': @@ -45,7 +44,7 @@ async function warmupCanvas(instance: Human): Promise<Result | undefined> { src = 'data:image/jpeg;base64,' + sample.body; break; default: - src = null; + src = ''; } // src = encodeURI('../assets/human-sample-upper.jpg'); let img: HTMLImageElement; @@ -63,7 +62,7 @@ async function warmupCanvas(instance: Human): Promise<Result | undefined> { if (ctx) ctx.drawImage(img, 0, 0); // const data = ctx?.getImageData(0, 0, canvas.height, canvas.width); const tensor = await instance.image(canvas); - const res = await instance.detect(tensor.tensor as Tensor, instance.config); + const res = tensor.tensor ?
await instance.detect(tensor.tensor, instance.config) : undefined; resolve(res); } }; @@ -79,8 +78,7 @@ async function warmupNode(instance: Human): Promise<Result | undefined> { else img = atob(sample.body); let res; if (('node' in tf) && (tf.getBackend() === 'tensorflow')) { - // @ts-ignore tf.node may be undefined - const data = tf['node'].decodeJpeg(img); + const data = tf['node'].decodeJpeg(img); // eslint-disable-line import/namespace const expanded = data.expandDims(0); instance.tf.dispose(data); // log('Input:', expanded); @@ -109,7 +107,7 @@ async function runInference(instance: Human) { /** Runs pre-compile on all loaded models */ export async function runCompile(allModels: Models) { - if (!tf.env().flagRegistry['ENGINE_COMPILE_ONLY']) return; // tfjs does not support compile-only inference + if (!tf.env().flagRegistry.ENGINE_COMPILE_ONLY) return; // tfjs does not support compile-only inference const backendType = tf.getBackend(); const webGLBackend = tf.backend(); if ((backendType !== 'webgl' && backendType !== 'humangl') || (!webGLBackend || !webGLBackend.checkCompileCompletion)) { diff --git a/test/browser.html index 5c1ebb58..7a30f1a4 100644 --- a/test/browser.html +++ b/test/browser.html @@ -24,150 +24,7 @@
diff --git a/test/browser.js b/test/browser.js new file mode 100644 index 00000000..d33ee41a --- /dev/null +++ b/test/browser.js @@ -0,0 +1,142 @@ +import { Human } from '../dist/human.esm.js'; + +const config = { + async: true, + warmup: 'none', + debug: true, + cacheSensitivity: 0, + object: { enabled: true }, +}; + +const backends = ['wasm', 'humangl', 'webgl', 'webgpu']; + +const start = performance.now(); + +function str(long, ...msg) { + if (!Array.isArray(msg)) return msg; + let line = ''; + for (const entry of msg) { + if (typeof entry === 'object') line += ' ' + JSON.stringify(entry, null, long ? 2 : 0).replace(/"/g, '').replace(/,/g, ', ').replace(/:/g, ': '); + else line += ' ' + entry; + } + return line + '\n'; +} + +let last = new Date(); +async function log(...msgs) { + const dt = new Date(); + const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`; + const elap = (dt - last).toString().padStart(5, '0'); + document.getElementById('log').innerHTML += ts + ' +' + elap + 'ms  ' + str(false, ...msgs); + document.documentElement.scrollTop = document.documentElement.scrollHeight; + console.log(ts, elap, ...msgs); // eslint-disable-line no-console + last = dt; +} + +async function detailed(...msgs) { + const dt = new Date(); + const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`; + const elap = (dt - last).toString().padStart(5, '0'); + document.getElementById('log').innerHTML += ts + ' +' + elap + 'ms  ' + str(true, ...msgs); + document.documentElement.scrollTop = document.documentElement.scrollHeight; + console.log(ts, elap, ...msgs); // eslint-disable-line no-console + last = dt; +} + +async function image(url) { + const el = document.createElement('img'); + el.id = 'image'; + const loaded = new Promise((resolve) => { el.onload = () => resolve(true); }); + el.src = url; + await loaded; + return el; +} + +async function wait(time) { + const waiting = new Promise((resolve) => { setTimeout(() => resolve(), time); }); + await waiting; +} + +function draw(canvas = null) { + const c = document.getElementById('canvas'); + const ctx = c.getContext('2d'); + if (canvas) ctx.drawImage(canvas, 0, 0, c.width, c.height); + else ctx.clearRect(0, 0, c.width, c.height); +} + +async function events(event) { + document.getElementById('events').innerText = `${Math.round(performance.now() - start)}ms Event: ${event}`; +} + +async function main() { + log('human tests'); + let res; + const human = new Human(config); + await human.init(); + human.events.addEventListener('warmup', () => events('warmup')); + human.events.addEventListener('image', () => events('image')); + human.events.addEventListener('detect', () => events('detect')); + const timer = setInterval(() => { document.getElementById('state').innerText = `State: ${human.state}`; }, 10); + log('version', human.version); + log('tfjs', human.tf.version.tfjs); + + const env = JSON.parse(JSON.stringify(human.env)); + env.kernels = human.env.kernels.length; + detailed('environment', env); + + detailed('config', human.config); + await human.load(); + const models = Object.keys(human.models).map((model) => ({ name: model, loaded: (human.models[model] !== null) })); + log('models', models); + for (const backend of backends) { + log(); + log('test 
start:', backend); + human.config.backend = backend; + await human.init(); + log('desired', backend, 'detected', human.tf.getBackend()); + if (human.tf.getBackend() !== backend) { + continue; + } + log('memory', human.tf.memory()); + res = await human.validate(); + log('validate', res); + res = await human.warmup({ warmup: 'face' }); + draw(res.canvas); + log('warmup', 'face'); + let img = await image('../../samples/in/ai-body.jpg'); + const input = await human.image(img); + log('input', input.tensor.shape); + draw(res.canvas); + res = await human.detect(input.tensor); + log('detect'); + human.next(); + log('interpolate'); + const persons = res.persons; + log('persons'); + log('summary', { persons: persons.length, face: res.face.length, body: res.body.length, hand: res.hand.length, object: res.object.length, gesture: res.gesture.length }); + log('performance', human.performance); + human.tf.dispose(input.tensor); + draw(); + + img = await image('../../samples/in/ai-face.jpg'); + for (const val of [0, 0.25, 0.5, 0.75, 10]) { + human.performance = {}; + const t0 = performance.now(); + for (let i = 0; i < 10; i++) { + res = await human.detect(img, { cacheSensitivity: val, filter: { pixelate: 5 * i }, object: { enabled: false } }); + draw(res.canvas); + } + const t1 = performance.now(); + log('benchmark', { time: Math.round((t1 - t0) / 10), cacheSensitivity: val, performance: human.performance }); + await wait(10); + } + draw(); + + log('memory', human.tf.memory()); + } + clearInterval(timer); + log(); + log('tests complete'); +} + +main(); diff --git a/test/build.log b/test/build.log index 93a90119..90130f9d 100644 --- a/test/build.log +++ b/test/build.log @@ -1,39 +1,39 @@ -2022-08-20 09:29:24 DATA:  Build {"name":"@vladmandic/human","version":"2.9.4"} -2022-08-20 09:29:24 INFO:  Application: {"name":"@vladmandic/human","version":"2.9.4"} -2022-08-20 09:29:24 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true} -2022-08-20 09:29:24 INFO:  Toolchain: {"build":"0.7.10","esbuild":"0.15.5","typescript":"4.7.4","typedoc":"0.23.10","eslint":"8.22.0"} -2022-08-20 09:29:24 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]} -2022-08-20 09:29:24 STATE: Clean: {"locations":["dist/*","types/lib/*","typedoc/*"]} -2022-08-20 09:29:24 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":608} -2022-08-20 09:29:24 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":75,"inputBytes":655391,"outputBytes":307474} -2022-08-20 09:29:24 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":612} -2022-08-20 09:29:24 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":75,"inputBytes":655395,"outputBytes":307478} -2022-08-20 09:29:24 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":664} -2022-08-20 09:29:24 STATE: Compile: 
{"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":75,"inputBytes":655447,"outputBytes":307528} -2022-08-20 09:29:24 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1069,"outputBytes":358} -2022-08-20 09:29:24 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1032,"outputBytes":583} -2022-08-20 09:29:24 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":75,"inputBytes":655366,"outputBytes":306323} -2022-08-20 09:29:25 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":11,"inputBytes":1271,"outputBytes":2787569} -2022-08-20 09:29:25 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":75,"inputBytes":3442352,"outputBytes":1669183} -2022-08-20 09:29:25 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":75,"inputBytes":3442352,"outputBytes":3073108} -2022-08-20 09:29:29 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":30} -2022-08-20 09:29:31 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":77,"generated":true} -2022-08-20 09:29:31 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6716,"outputBytes":3141} -2022-08-20 09:29:31 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":15629,"outputBytes":7798} -2022-08-20 09:29:39 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":109,"errors":0,"warnings":0} -2022-08-20 09:29:39 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"} -2022-08-20 09:29:39 STATE: Copy: {"input":"tfjs/tfjs.esm.d.ts"} -2022-08-20 09:29:39 INFO:  Done... -2022-08-20 09:29:39 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":198} -2022-08-20 09:29:39 STATE: Copy: {"input":"types/human.d.ts"} -2022-08-20 09:29:39 INFO:  Analyze models: {"folders":8,"result":"models/models.json"} -2022-08-20 09:29:39 STATE: Models {"folder":"./models","models":13} -2022-08-20 09:29:39 STATE: Models {"folder":"../human-models/models","models":42} -2022-08-20 09:29:39 STATE: Models {"folder":"../blazepose/model/","models":4} -2022-08-20 09:29:39 STATE: Models {"folder":"../anti-spoofing/model","models":1} -2022-08-20 09:29:39 STATE: Models {"folder":"../efficientpose/models","models":3} -2022-08-20 09:29:39 STATE: Models {"folder":"../insightface/models","models":5} -2022-08-20 09:29:39 STATE: Models {"folder":"../movenet/models","models":3} -2022-08-20 09:29:39 STATE: Models {"folder":"../nanodet/models","models":4} -2022-08-20 09:29:40 STATE: Models: {"count":57,"totalSize":383017442} -2022-08-20 09:29:40 INFO:  Human Build complete... 
{"logFile":"test/build.log"} +2022-08-21 13:32:23 DATA:  Build {"name":"@vladmandic/human","version":"2.9.4"} +2022-08-21 13:32:23 INFO:  Application: {"name":"@vladmandic/human","version":"2.9.4"} +2022-08-21 13:32:23 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true} +2022-08-21 13:32:23 INFO:  Toolchain: {"build":"0.7.10","esbuild":"0.15.5","typescript":"4.7.4","typedoc":"0.23.10","eslint":"8.22.0"} +2022-08-21 13:32:23 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]} +2022-08-21 13:32:23 STATE: Clean: {"locations":["dist/*","types/lib/*","typedoc/*"]} +2022-08-21 13:32:23 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":159,"outputBytes":608} +2022-08-21 13:32:23 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":75,"inputBytes":652939,"outputBytes":306172} +2022-08-21 13:32:23 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":167,"outputBytes":612} +2022-08-21 13:32:23 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":75,"inputBytes":652943,"outputBytes":306176} +2022-08-21 13:32:23 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":206,"outputBytes":664} +2022-08-21 13:32:23 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":75,"inputBytes":652995,"outputBytes":306226} +2022-08-21 13:32:23 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1125,"outputBytes":358} +2022-08-21 13:32:23 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1088,"outputBytes":583} +2022-08-21 13:32:23 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":75,"inputBytes":652914,"outputBytes":305046} +2022-08-21 13:32:23 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":11,"inputBytes":1344,"outputBytes":2787569} +2022-08-21 13:32:23 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":75,"inputBytes":3439900,"outputBytes":1667925} +2022-08-21 13:32:23 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":75,"inputBytes":3439900,"outputBytes":3070714} +2022-08-21 13:32:28 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":30} +2022-08-21 13:32:29 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":77,"generated":true} +2022-08-21 13:32:29 STATE: Compile: 
{"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6699,"outputBytes":3141} +2022-08-21 13:32:29 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":15549,"outputBytes":7741} +2022-08-21 13:32:39 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":111,"errors":0,"warnings":0} +2022-08-21 13:32:39 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"} +2022-08-21 13:32:39 STATE: Copy: {"input":"tfjs/tfjs.esm.d.ts"} +2022-08-21 13:32:39 INFO:  Done... +2022-08-21 13:32:40 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":198} +2022-08-21 13:32:40 STATE: Copy: {"input":"types/human.d.ts"} +2022-08-21 13:32:40 INFO:  Analyze models: {"folders":8,"result":"models/models.json"} +2022-08-21 13:32:40 STATE: Models {"folder":"./models","models":13} +2022-08-21 13:32:40 STATE: Models {"folder":"../human-models/models","models":42} +2022-08-21 13:32:40 STATE: Models {"folder":"../blazepose/model/","models":4} +2022-08-21 13:32:40 STATE: Models {"folder":"../anti-spoofing/model","models":1} +2022-08-21 13:32:40 STATE: Models {"folder":"../efficientpose/models","models":3} +2022-08-21 13:32:40 STATE: Models {"folder":"../insightface/models","models":5} +2022-08-21 13:32:40 STATE: Models {"folder":"../movenet/models","models":3} +2022-08-21 13:32:40 STATE: Models {"folder":"../nanodet/models","models":4} +2022-08-21 13:32:40 STATE: Models: {"count":57,"totalSize":383017442} +2022-08-21 13:32:40 INFO:  Human Build complete... {"logFile":"test/build.log"} diff --git a/test/node.js b/test/node.js index a5613454..da2d66da 100644 --- a/test/node.js +++ b/test/node.js @@ -76,7 +76,6 @@ function logStdIO(ok, test, buffer) { } } -// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars async function runTest(test) { log.info(); log.info(test, 'start'); @@ -90,7 +89,6 @@ async function runTest(test) { }); } -// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars async function runDemo(demo) { // log.info(); log.info(demo, 'start'); diff --git a/test/test-gear.js b/test/test-gear.js index bf665a26..91dd6d7f 100644 --- a/test/test-gear.js +++ b/test/test-gear.js @@ -41,14 +41,8 @@ function printResult(obj) { async function main() { log.header(); - if (process.argv.length !== 3) { - log.error('parameters: or missing'); - process.exit(1); - } - if (!fs.existsSync(process.argv[2])) { - log.error(`file not found: ${process.argv[2]}`); - process.exit(1); - } + if (process.argv.length !== 3) throw new Error('parameters: or missing'); + if (!fs.existsSync(process.argv[2])) throw new Error(`file not found: ${process.argv[2]}`); const stat = fs.statSync(process.argv[2]); const files = []; if (stat.isFile()) files.push(process.argv[2]); diff --git a/test/test-main.js b/test/test-main.js index 32d066f8..70bc79e0 100644 --- a/test/test-main.js +++ b/test/test-main.js @@ -13,7 +13,7 @@ const log = (status, ...data) => { process.on('uncaughtException', (err) => { log('error', 'uncaughtException', lastOp, err); // abort immediately - process.exit(1); + throw new Error(err); }); async function testHTTP() { @@ -23,7 +23,7 @@ async function testHTTP() { .then((res) => { if (res && res.ok) log('state', 'passed: model server:', config.modelBasePath); else log('error', 'failed: 
model server:', config.modelBasePath); - resolve(res && res.ok); + resolve(res && res.ok); // eslint-disable-line promise/always-return }) .catch((err) => { log('error', 'failed: model server:', err.message); @@ -262,7 +262,7 @@ async function verifyCompare(human) { async function test(Human, inputConfig) { lastOp = `test ${inputConfig}`; config = inputConfig; - fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-extraneous-require, node/no-missing-import + fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-unsupported-features/es-syntax const ok = await testHTTP(); if (!ok) { log('error', 'aborting test'); @@ -292,7 +292,7 @@ async function test(Human, inputConfig) { // test model loading log('info', 'test: model load'); await human.load(); - const models = Object.keys(human.models).map((model) => ({ name: model, loaded: (human.models[model] !== null), url: human.models[model] ? human.models[model].modelUrl : null })); + const models = Object.keys(human.models).map((model) => ({ name: model, loaded: (human.models[model] !== null), url: human.models[model] ? human.models[model]['modelUrl'] : null })); const loaded = models.filter((model) => model.loaded); if (models.length === 23 && loaded.length === 12) log('state', 'passed: models loaded', models.length, loaded.length, models); else log('error', 'failed: models loaded', models.length, loaded.length, models); @@ -339,8 +339,8 @@ async function test(Human, inputConfig) { config.async = true; config.cacheSensitivity = 0; res = await testDetect(human, 'samples/in/ai-body.jpg', 'async'); - if (!res || res?.face?.length !== 1 || res?.face[0].gender !== 'female') log('error', 'failed: default result face mismatch', res?.face?.length, res?.face[0].gender, res?.face[0].genderScore); - else log('state', 'passed: default result face match', res?.face?.length, res?.face[0].gender, res?.face[0].genderScore); + if (!res || res.face?.length !== 1 || res.face[0].gender !== 'female') log('error', 'failed: default result face mismatch', res.face?.length, res.face[0].gender, res.face[0].genderScore); + else log('state', 'passed: default result face match', res.face?.length, res.face[0].gender, res.face[0].genderScore); // test default config sync log('info', 'test sync'); @@ -348,8 +348,8 @@ async function test(Human, inputConfig) { config.async = false; config.cacheSensitivity = 0; res = await testDetect(human, 'samples/in/ai-body.jpg', 'sync'); - if (!res || res?.face?.length !== 1 || res?.face[0].gender !== 'female') log('error', 'failed: default sync', res?.face?.length, res?.face[0].gender, res?.face[0].genderScore); - else log('state', 'passed: default sync', res?.face?.length, res?.face[0].gender, res?.face[0].genderScore); + if (!res || res.face?.length !== 1 || res.face[0].gender !== 'female') log('error', 'failed: default sync', res.face?.length, res.face[0].gender, res.face[0].genderScore); + else log('state', 'passed: default sync', res.face?.length, res.face[0].gender, res.face[0].genderScore); // test image processing log('info', 'test: image process'); @@ -387,14 +387,14 @@ async function test(Human, inputConfig) { log('info', 'test object'); config.object = { enabled: true, modelPath: 'mb3-centernet.json' }; res = await testDetect(human, 'samples/in/ai-body.jpg', 'object'); - if (!res || res?.object?.length < 1 || res?.object[0]?.label !== 'person') log('error', 'failed: centernet', res?.object); + if (!res || res.object?.length < 1 || res.object[0]?.label !== 'person') log('error', 
'failed: centernet', res.object); else log('state', 'passed: centernet'); - human.models['centernet'] = null; + human.models.centernet = null; config.object = { enabled: true, modelPath: 'https://vladmandic.github.io/human-models/models/nanodet.json' }; res = await testDetect(human, 'samples/in/ai-body.jpg', 'object'); - if (!res || res?.object?.length < 1 || res?.object[0]?.label !== 'person') log('error', 'failed: nanodet', res?.object); + if (!res || res.object?.length < 1 || res.object[0]?.label !== 'person') log('error', 'failed: nanodet', res.object); else log('state', 'passed: nanodet'); - human.models['nanodet'] = null; + human.models.nanodet = null; config.object.enabled = false; // test sensitive config @@ -405,13 +405,13 @@ async function test(Human, inputConfig) { config.body = { minConfidence: 0.0001 }; config.hand = { minConfidence: 0.0001 }; res = await testDetect(human, 'samples/in/ai-body.jpg', 'sensitive'); - if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 2 || res?.gesture?.length < 8) log('error', 'failed: sensitive result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length); + if (!res || res.face?.length !== 1 || res.body?.length !== 1 || res.hand?.length !== 2 || res.gesture?.length < 8) log('error', 'failed: sensitive result mismatch', res.face?.length, res.body?.length, res.hand?.length, res.gesture?.length); else log('state', 'passed: sensitive result match'); // test sensitive details face const face = res && res.face ? res.face[0] : null; if (!face || face?.box?.length !== 4 || face?.mesh?.length !== 478 || face?.embedding?.length !== 1024 || face?.rotation?.matrix?.length !== 9) { - log('error', 'failed: sensitive face result mismatch', res?.face?.length, face?.box?.length, face?.mesh?.length, face?.embedding?.length, face?.rotation?.matrix?.length); + log('error', 'failed: sensitive face result mismatch', res.face?.length, face?.box?.length, face?.mesh?.length, face?.embedding?.length, face?.rotation?.matrix?.length); } else log('state', 'passed: sensitive face result match'); if (!face || face?.emotion?.length < 1 || face.emotion[0].score < 0.30) log('error', 'failed: sensitive face emotion result mismatch', face?.emotion); else log('state', 'passed: sensitive face emotion result', face?.emotion); @@ -469,18 +469,18 @@ async function test(Human, inputConfig) { human.reset(); config.async = false; config.cacheSensitivity = 0; - config.face['mobilefacenet'] = { enabled: true, modelPath: 'https://vladmandic.github.io/human-models/models/mobilefacenet.json' }; + config.face.mobilefacenet = { enabled: true, modelPath: 'https://vladmandic.github.io/human-models/models/mobilefacenet.json' }; res = await testDetect(human, 'samples/in/ai-face.jpg', 'face embeddings'); if (!res || !res.face || !res.face[0] || res.face[0].embedding?.length !== 192) log('error', 'failed: mobilefacenet', { embedding: res.face?.[0]?.embedding?.length }); else log('state', 'passed: mobilefacenet', { embedding: res.face?.[0]?.embedding?.length }); - config.face['insightface'] = { enabled: true, modelPath: 'https://vladmandic.github.io/insightface/models/insightface-mobilenet-swish.json' }; + config.face.insightface = { enabled: true, modelPath: 'https://vladmandic.github.io/insightface/models/insightface-mobilenet-swish.json' }; res = await testDetect(human, 'samples/in/ai-face.jpg', 'face embeddings'); if (!res || !res.face || !res.face[0] || res.face[0]?.embedding?.length !== 512) log('error', 'failed: 
insightface', { embedding: res.face?.[0]?.embedding?.length }); else log('state', 'passed: insightface', { embedding: res.face?.[0]?.embedding?.length }); - human.models['mobilefacenet'] = null; - config.face['mobilefacenet'] = { enabled: false }; - human.models['insightface'] = null; - config.face['insightface'] = { enabled: false }; + human.models.mobilefacenet = null; + config.face.mobilefacenet = { enabled: false }; + human.models.insightface = null; + config.face.insightface = { enabled: false }; // test face attention log('info', 'test face attention'); @@ -498,9 +498,9 @@ async function test(Human, inputConfig) { config.face = { mesh: { enabled: false }, iris: { enabled: false }, description: { enabled: false }, emotion: { enabled: false } }; config.hand = { landmarks: false }; res = await testDetect(human, 'samples/in/ai-body.jpg', 'detectors'); - if (!res || res?.face?.length !== 1 || res?.face[0]?.gender !== 'unknown' || res?.face[0]?.age || res?.face[0]?.embedding?.length > 0) log('error', 'failed: detectors result face mismatch', res?.face); + if (!res || res.face?.length !== 1 || res.face[0]?.gender !== 'unknown' || res.face[0]?.age || res.face[0]?.embedding?.length > 0) log('error', 'failed: detectors result face mismatch', res.face); else log('state', 'passed: detector result face match'); - if (!res || res?.hand?.length !== 1 || res?.hand[0]?.landmarks?.length > 0) log('error', 'failed: detectors result hand mismatch', res?.hand?.length); + if (!res || res.hand?.length !== 1 || res.hand[0]?.landmarks?.length > 0) log('error', 'failed: detectors result hand mismatch', res.hand?.length); else log('state', 'passed: detector result hand match'); // test multiple instances diff --git a/test/test-node-wasm.js b/test/test-node-wasm.js index 9a8596fc..616a1159 100644 --- a/test/test-node-wasm.js +++ b/test/test-node-wasm.js @@ -5,9 +5,7 @@ const { Canvas, Image } = require('canvas'); // eslint-disable-line node/no-extr const H = require('../dist/human.node-wasm.js'); const test = require('./test-main.js').test; -// @ts-ignore H.env.Canvas = Canvas; // requires monkey-patch as wasm does not have tf.browser namespace -// @ts-ignore H.env.Image = Image; // requires monkey-patch as wasm does not have tf.browser namespace const config = { diff --git a/tfjs/tf-browser.ts b/tfjs/tf-browser.ts index 5a069b9c..f1139eee 100644 --- a/tfjs/tf-browser.ts +++ b/tfjs/tf-browser.ts @@ -3,6 +3,8 @@ * @external */ +/* eslint-disable import/no-extraneous-dependencies */ + // export all from build bundle export * from '@tensorflow/tfjs/dist/index.js'; export * from '@tensorflow/tfjs-backend-webgl/dist/index.js'; diff --git a/tfjs/tf-node-gpu.ts b/tfjs/tf-node-gpu.ts index 2d996377..a7dcb93c 100644 --- a/tfjs/tf-node-gpu.ts +++ b/tfjs/tf-node-gpu.ts @@ -1,4 +1,7 @@ /** * Creates tfjs bundle used by Human node-gpu build target */ + +/* eslint-disable import/no-extraneous-dependencies */ + export * from '@tensorflow/tfjs-node-gpu'; diff --git a/tfjs/tf-node-wasm.ts b/tfjs/tf-node-wasm.ts index c2051aca..122351b0 100644 --- a/tfjs/tf-node-wasm.ts +++ b/tfjs/tf-node-wasm.ts @@ -1,5 +1,8 @@ /** * Creates tfjs bundle used by Human node-wasm build target */ + +/* eslint-disable import/no-extraneous-dependencies */ + export * from '@tensorflow/tfjs'; export * from '@tensorflow/tfjs-backend-wasm'; diff --git a/tfjs/tf-node.ts b/tfjs/tf-node.ts index 41de60a2..7945d4e0 100644 --- a/tfjs/tf-node.ts +++ b/tfjs/tf-node.ts @@ -1,4 +1,7 @@ /** * Creates tfjs bundle used by Human node build target */ + +/* 
eslint-disable import/no-extraneous-dependencies */ + export * from '@tensorflow/tfjs-node'; diff --git a/tfjs/tf-version.ts b/tfjs/tf-version.ts index ca5d0023..b3564e53 100644 --- a/tfjs/tf-version.ts +++ b/tfjs/tf-version.ts @@ -1,3 +1,5 @@ +/* eslint-disable import/no-extraneous-dependencies */ + // get versions of all packages import { version as tfjsVersion } from '@tensorflow/tfjs/package.json'; import { version as tfjsCoreVersion } from '@tensorflow/tfjs-core/package.json'; diff --git a/tsconfig.json b/tsconfig.json index 83db3fb6..be1b8e29 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -51,8 +51,8 @@ "indentSize": 2, "tabSize": 2 }, - "exclude": ["node_modules/", "types/", "tfjs/", "dist/"], - "include": ["src", "types/human.d.ts"], + "exclude": ["node_modules/", "types/", "dist/"], + "include": ["src", "tfjs/*.ts", "types/human.d.ts", "test/**/*.ts", "demo/**/*.ts"], "typedocOptions": { "externalPattern": ["node_modules/", "tfjs/"] }
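
For reference, the frame-to-frame interpolation changed in src/util/interpolate.ts above is a weighted running average: each buffered value moves 1/bufferedFactor of the way toward the new result on every frame. Below is a minimal standalone sketch of that smoothing; the helper names are illustrative and not part of the Human API. The second helper shows one way to sidestep the wrap-around artifact the source comment notes for gaze bearing, by averaging on the unit circle instead of on raw angles:

// weighted running average used for scalar results: previous value carries weight (factor - 1), new value carries weight 1
const smooth = (prev: number, next: number, factor: number): number => ((factor - 1) * prev + next) / factor;

// angles wrap at +/-PI, so a plain average jumps when the sign flips; averaging sin/cos components keeps continuity
const smoothAngle = (prev: number, next: number, factor: number): number => Math.atan2(
  ((factor - 1) * Math.sin(prev) + Math.sin(next)) / factor,
  ((factor - 1) * Math.cos(prev) + Math.cos(next)) / factor,
);

smooth(10, 20, 4); // 12.5: moves a quarter of the way toward the new value
smoothAngle(3.1, -3.1, 4); // ~3.12: stays near +/-PI instead of collapsing toward 0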