From ec53f70128a0d3c55fcce72aff00f6e89ef8b3d1 Mon Sep 17 00:00:00 2001 From: Vladimir Mandic <mandic00@live.com> Date: Thu, 29 Sep 2022 21:28:13 -0400 Subject: [PATCH] add human.webcam methods --- CHANGELOG.md | 4 +- README.md | 24 +- TODO.md | 8 +- demo/typescript/index.js | 4 +- demo/typescript/index.js.map | 6 +- demo/typescript/index.ts | 35 +- demo/video/index.html | 48 +- package.json | 12 +- src/config.ts | 2 +- src/exports.ts | 3 + src/human.ts | 7 + src/util/webcam.ts | 193 ++++ test/build.log | 79 +- test/test.log | 2003 +++++++++++++++++----------------- 14 files changed, 1317 insertions(+), 1111 deletions(-) create mode 100644 src/util/webcam.ts diff --git a/CHANGELOG.md b/CHANGELOG.md index b95b7449..9eef4be8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # @vladmandic/human - Version: **2.10.3** + Version: **2.11.0** Description: **Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition** Author: **Vladimir Mandic <mandic00@live.com>** @@ -9,7 +9,7 @@ ## Changelog -### **HEAD -> main** 2022/09/25 mandic00@live.com +### **HEAD -> main** 2022/09/27 mandic00@live.com - create funding.yml - fix rotation interpolation diff --git a/README.md b/README.md index c0a849e7..204b2a8b 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ JavaScript module using TensorFlow/JS Machine Learning library - Detection of frame changes to trigger only required models for improved performance - Intelligent temporal interpolation to provide smooth results regardless of processing performance - Simple unified API +- Built-in Image, Video and WebCam handling
@@ -94,7 +95,8 @@ JavaScript module using TensorFlow/JS Machine Learning library - [**Code Repository**](https://github.com/vladmandic/human) - [**NPM Package**](https://www.npmjs.com/package/@vladmandic/human) - [**Issues Tracker**](https://github.com/vladmandic/human/issues) -- [**TypeDoc API Specification**](https://vladmandic.github.io/human/typedoc/classes/Human.html) +- [**TypeDoc API Specification - Main class**](https://vladmandic.github.io/human/typedoc/classes/Human.html) +- [**TypeDoc API Specification - Full**](https://vladmandic.github.io/human/typedoc/) - [**Change Log**](https://github.com/vladmandic/human/blob/main/CHANGELOG.md) - [**Current To-do List**](https://github.com/vladmandic/human/blob/main/TODO.md) @@ -346,13 +348,31 @@ const outputCanvas = document.getElementById('canvas-id'); async function drawResults() { const interpolated = human.next(); // get smoothed result using last-known results human.draw.all(outputCanvas, interpolated); // draw the frame - requestAnimationFrame(drawVideo); // run draw loop + requestAnimationFrame(drawResults); // run draw loop } human.video(inputVideo); // start detection loop which continuously updates results drawResults(); // start draw loop ``` +or use the built-in webcam helper methods, which take care of video handling completely: + +```js +const human = new Human(); // create instance of Human +const outputCanvas = document.getElementById('canvas-id'); + +async function drawResults() { + const interpolated = human.next(); // get smoothed result using last-known results + human.draw.canvas(human.webcam.element, outputCanvas); // draw current webcam frame + human.draw.all(outputCanvas, interpolated); // draw the frame detection results + requestAnimationFrame(drawResults); // run draw loop +} + +await human.webcam.start({ crop: true }); +human.video(human.webcam.element); // start detection loop which continuously updates results +drawResults(); // start draw loop +``` + And for even better results, you can run detection in a separate web worker thread


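The webcam helpers introduced by this patch also expose playback state (`human.webcam.paused`, `human.webcam.play()`, `human.webcam.pause()`) and the actual stream dimensions (`human.webcam.width`, `human.webcam.height`). A minimal pause/resume sketch distilled from the `demo/typescript` changes below (the element IDs are placeholders, not part of the patch):

```js
const human = new Human(); // create instance of Human
const video = document.getElementById('video'); // placeholder video element id
const canvas = document.getElementById('canvas'); // placeholder output canvas id

async function initWebCam() {
  await human.webcam.start({ element: video, crop: true }); // bind webcam stream to an existing video element, cropped to fit
  canvas.width = human.webcam.width; // size output canvas to the actual webcam resolution
  canvas.height = human.webcam.height;
  canvas.onclick = async () => { // toggle pause/resume on click
    if (human.webcam.paused) await human.webcam.play();
    else human.webcam.pause();
  };
}
```
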
diff --git a/TODO.md b/TODO.md index 93758645..67a7b574 100644 --- a/TODO.md +++ b/TODO.md @@ -44,13 +44,17 @@ Enable via `about:config` -> `gfx.offscreencanvas.enabled` ## Pending Release Changes -- New API [`human.video()`](https://vladmandic.github.io/human/typedoc/classes/Human.html#video) - Runs continuous detection of an input video instead of processing each frame manually +- New methods [`human.webcam.*`](https://vladmandic.github.io/human/typedoc/classes/WebCam.html) + Directly configures and controls WebCam streams +- New method [`human.video()`](https://vladmandic.github.io/human/typedoc/classes/Human.html#video) + Runs continuous detection of an input video instead of processing each frame manually using `human.detect()` - New simple demo [*Live*](https://vladmandic.github.io/human/demo/video/index.html) + Full HTML and JavaScript code in less than a screen - New advanced demo using BabylonJS - Enable model cache when using web workers - Fix for `face.rotation` interpolation - Improve NodeJS resolver when using ESM +- Update demo `demo/typescript` - Update demo `demo/faceid` - Update demo `demo/nodejs/process-folder.js` and re-process `/samples` diff --git a/demo/typescript/index.js b/demo/typescript/index.js index 1fd213b1..50a2ccfe 100644 --- a/demo/typescript/index.js +++ b/demo/typescript/index.js @@ -4,6 +4,6 @@ author: ' */ -import*as c from"../../dist/human.esm.js";var w={async:!1,modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},gesture:{enabled:!0}},e=new c.Human(w);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var t={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},o={detectFPS:0,drawFPS:0,frames:0,averageMs:0},i=(...a)=>{t.log.innerText+=a.join(" ")+`
-`,console.log(...a)},r=a=>t.fps.innerText=a,b=a=>t.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(a).replace(/"|{|}/g,"").replace(/,/g," | ");async function h(){r("starting webcam...");let a={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth},height:{ideal:document.body.clientHeight}}},d=await navigator.mediaDevices.getUserMedia(a),f=new Promise(p=>{t.video.onloadeddata=()=>p(!0)});t.video.srcObject=d,t.video.play(),await f,t.canvas.width=t.video.videoWidth,t.canvas.height=t.video.videoHeight;let s=d.getVideoTracks()[0],v=s.getCapabilities?s.getCapabilities():"",g=s.getSettings?s.getSettings():"",u=s.getConstraints?s.getConstraints():"";i("video:",t.video.videoWidth,t.video.videoHeight,s.label,{stream:d,track:s,settings:g,constraints:u,capabilities:v}),t.canvas.onclick=()=>{t.video.paused?t.video.play():t.video.pause()}}async function l(){if(!t.video.paused){n.start===0&&(n.start=e.now()),await e.detect(t.video);let a=e.tf.memory().numTensors;a-n.tensors!==0&&i("allocated tensors:",a-n.tensors),n.tensors=a,o.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,o.frames++,o.averageMs=Math.round(1e3*(e.now()-n.start)/o.frames)/1e3,o.frames%100===0&&!t.video.paused&&i("performance",{...o,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(l)}async function 
m(){if(!t.video.paused){let d=e.next(e.result);e.config.filter.flip?e.draw.canvas(d.canvas,t.canvas):e.draw.canvas(t.video,t.canvas),await e.draw.all(t.canvas,d),b(d.performance)}let a=e.now();o.drawFPS=Math.round(1e3*1e3/(a-n.draw))/1e3,n.draw=a,r(t.video.paused?"paused":`fps: ${o.detectFPS.toFixed(1).padStart(5," ")} detect | ${o.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(m,30)}async function M(){i("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),i("platform:",e.env.platform,"| agent:",e.env.agent),r("loading..."),await e.load(),i("backend:",e.tf.getBackend(),"| available:",e.env.backends),i("models stats:",e.getModelStats()),i("models loaded:",Object.values(e.models).filter(a=>a!==null).length),r("initializing..."),await e.warmup(),await h(),await l(),await m()}window.onload=M; +import*as i from"../../dist/human.esm.js";var m={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},segmentation:{enabled:!1},gesture:{enabled:!0}},e=new i.Human(m);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var a={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},s={detectFPS:0,drawFPS:0,frames:0,averageMs:0},o=(...t)=>{a.log.innerText+=t.join(" ")+` +`,console.log(...t)},d=t=>a.fps.innerText=t,f=t=>a.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(t).replace(/"|{|}/g,"").replace(/,/g," | ");async function l(){if(!a.video.paused){n.start===0&&(n.start=e.now()),await e.detect(a.video);let t=e.tf.memory().numTensors;t-n.tensors!==0&&o("allocated tensors:",t-n.tensors),n.tensors=t,s.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,s.frames++,s.averageMs=Math.round(1e3*(e.now()-n.start)/s.frames)/1e3,s.frames%100===0&&!a.video.paused&&o("performance",{...s,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(l)}async function c(){if(!a.video.paused){let r=e.next(e.result);e.config.filter.flip?e.draw.canvas(r.canvas,a.canvas):e.draw.canvas(a.video,a.canvas),await e.draw.all(a.canvas,r),f(r.performance)}let t=e.now();s.drawFPS=Math.round(1e3*1e3/(t-n.draw))/1e3,n.draw=t,d(a.video.paused?"paused":`fps: ${s.detectFPS.toFixed(1).padStart(5," ")} detect | ${s.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(c,30)}async function u(){await e.webcam.start({element:a.video,crop:!0}),a.canvas.width=e.webcam.width,a.canvas.height=e.webcam.height,a.canvas.onclick=async()=>{e.webcam.paused?await e.webcam.play():e.webcam.pause()}}async function w(){o("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),o("platform:",e.env.platform,"| agent:",e.env.agent),d("loading..."),await e.load(),o("backend:",e.tf.getBackend(),"| available:",e.env.backends),o("models stats:",e.getModelStats()),o("models loaded:",Object.values(e.models).filter(t=>t!==null).length),d("initializing..."),await e.warmup(),await u(),await l(),await c()}window.onload=w; //# sourceMappingURL=index.js.map diff --git a/demo/typescript/index.js.map b/demo/typescript/index.js.map index ca92fb46..d767529d 100644 --- a/demo/typescript/index.js.map +++ b/demo/typescript/index.js.map @@ -1,7 +1,7 @@ { 
"version": 3, "sources": ["index.ts"], - "sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary \n * @author \n * @copyright \n * @license MIT\n */\n\nimport * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human\n\nconst humanConfig: Partial = { // user configuration for human, used to fine-tune behavior\n // backend: 'wasm' as const,\n // wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.20.0/dist/',\n // cacheSensitivity: 0,\n async: false,\n modelBasePath: '../../models',\n filter: { enabled: true, equalization: false, flip: false },\n face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },\n body: { enabled: true },\n hand: { enabled: true },\n object: { enabled: false },\n gesture: { enabled: true },\n};\n\nconst human = new H.Human(humanConfig); // create instance of human with overrides from user configuration\n\nhuman.env.perfadd = false; // is performance data showing instant or total values\nhuman.draw.options.font = 'small-caps 18px \"Lato\"'; // set font used to draw labels when using draw methods\nhuman.draw.options.lineHeight = 20;\n// human.draw.options.fillPolygons = true;\n\nconst dom = { // grab instances of dom objects so we dont have to look them up later\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('status') as HTMLPreElement,\n perf: document.getElementById('performance') as HTMLDivElement,\n};\nconst timestamp = { detect: 0, draw: 0, tensors: 0, start: 0 }; // holds information used to calculate performance and possible memory leaks\nconst fps = { detectFPS: 0, drawFPS: 0, frames: 0, averageMs: 0 }; // holds calculated fps information for both detect and screen refresh\n\nconst log = (...msg) => { // helper method to output messages\n dom.log.innerText += msg.join(' ') + '\\n';\n console.log(...msg); // eslint-disable-line no-console\n};\nconst status = (msg) => dom.fps.innerText = msg; // print status element\nconst perf = (msg) => dom.perf.innerText = 'tensors:' + (human.tf.memory().numTensors as number).toString() + ' | performance: ' + JSON.stringify(msg).replace(/\"|{|}/g, '').replace(/,/g, ' | '); // print performance element\n\nasync function webCam() { // initialize webcam\n status('starting webcam...');\n // @ts-ignore resizeMode is not yet defined in tslib\n const options: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth }, height: { ideal: document.body.clientHeight } } };\n const stream: MediaStream = await navigator.mediaDevices.getUserMedia(options);\n const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });\n dom.video.srcObject = stream;\n void dom.video.play();\n await ready;\n dom.canvas.width = dom.video.videoWidth;\n dom.canvas.height = dom.video.videoHeight;\n const track: MediaStreamTrack = stream.getVideoTracks()[0];\n const capabilities: MediaTrackCapabilities | string = track.getCapabilities ? track.getCapabilities() : '';\n const settings: MediaTrackSettings | string = track.getSettings ? track.getSettings() : '';\n const constraints: MediaTrackConstraints | string = track.getConstraints ? 
track.getConstraints() : '';\n log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });\n dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click\n if (dom.video.paused) void dom.video.play();\n else dom.video.pause();\n };\n}\n\nasync function detectionLoop() { // main detection loop\n if (!dom.video.paused) {\n if (timestamp.start === 0) timestamp.start = human.now();\n // log('profiling data:', await human.profile(dom.video));\n await human.detect(dom.video); // actual detection; were not capturing output in a local variable as it can also be reached via human.result\n const tensors = human.tf.memory().numTensors; // check current tensor usage for memory leaks\n if (tensors - timestamp.tensors !== 0) log('allocated tensors:', tensors - timestamp.tensors); // printed on start and each time there is a tensor leak\n timestamp.tensors = tensors;\n fps.detectFPS = Math.round(1000 * 1000 / (human.now() - timestamp.detect)) / 1000;\n fps.frames++;\n fps.averageMs = Math.round(1000 * (human.now() - timestamp.start) / fps.frames) / 1000;\n if (fps.frames % 100 === 0 && !dom.video.paused) log('performance', { ...fps, tensors: timestamp.tensors });\n }\n timestamp.detect = human.now();\n requestAnimationFrame(detectionLoop); // start new frame immediately\n}\n\nasync function drawLoop() { // main screen refresh loop\n if (!dom.video.paused) {\n const interpolated = human.next(human.result); // smoothen result using last-known results\n if (human.config.filter.flip) human.draw.canvas(interpolated.canvas as HTMLCanvasElement, dom.canvas); // draw processed image to screen canvas\n else human.draw.canvas(dom.video, dom.canvas); // draw original video to screen canvas // better than using procesed image as this loop happens faster than processing loop\n await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.\n perf(interpolated.performance); // write performance data\n }\n const now = human.now();\n fps.drawFPS = Math.round(1000 * 1000 / (now - timestamp.draw)) / 1000;\n timestamp.draw = now;\n status(dom.video.paused ? 
'paused' : `fps: ${fps.detectFPS.toFixed(1).padStart(5, ' ')} detect | ${fps.drawFPS.toFixed(1).padStart(5, ' ')} draw`); // write status\n setTimeout(drawLoop, 30); // use to slow down refresh from max refresh rate to target of 30 fps\n}\n\nasync function main() { // main entry point\n log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);\n log('platform:', human.env.platform, '| agent:', human.env.agent);\n status('loading...');\n await human.load(); // preload all models\n log('backend:', human.tf.getBackend(), '| available:', human.env.backends);\n log('models stats:', human.getModelStats());\n log('models loaded:', Object.values(human.models).filter((model) => model !== null).length);\n status('initializing...');\n await human.warmup(); // warmup function to initialize backend for future faster detection\n await webCam(); // start webcam\n await detectionLoop(); // start detection loop\n await drawLoop(); // start draw loop\n}\n\nwindow.onload = main;\n"], - "mappings": ";;;;;;AASA,UAAYA,MAAO,0BAEnB,IAAMC,EAAiC,CAIrC,MAAO,GACP,cAAe,eACf,OAAQ,CAAE,QAAS,GAAM,aAAc,GAAO,KAAM,EAAM,EAC1D,KAAM,CAAE,QAAS,GAAM,SAAU,CAAE,SAAU,EAAM,EAAG,KAAM,CAAE,QAAS,EAAK,EAAG,UAAW,CAAE,QAAS,EAAM,EAAG,KAAM,CAAE,QAAS,EAAK,EAAG,YAAa,CAAE,QAAS,EAAK,EAAG,QAAS,CAAE,QAAS,EAAK,CAAE,EAClM,KAAM,CAAE,QAAS,EAAK,EACtB,KAAM,CAAE,QAAS,EAAK,EACtB,OAAQ,CAAE,QAAS,EAAM,EACzB,QAAS,CAAE,QAAS,EAAK,CAC3B,EAEMC,EAAQ,IAAM,QAAMD,CAAW,EAErCC,EAAM,IAAI,QAAU,GACpBA,EAAM,KAAK,QAAQ,KAAO,yBAC1BA,EAAM,KAAK,QAAQ,WAAa,GAGhC,IAAMC,EAAM,CACV,MAAO,SAAS,eAAe,OAAO,EACtC,OAAQ,SAAS,eAAe,QAAQ,EACxC,IAAK,SAAS,eAAe,KAAK,EAClC,IAAK,SAAS,eAAe,QAAQ,EACrC,KAAM,SAAS,eAAe,aAAa,CAC7C,EACMC,EAAY,CAAE,OAAQ,EAAG,KAAM,EAAG,QAAS,EAAG,MAAO,CAAE,EACvDC,EAAM,CAAE,UAAW,EAAG,QAAS,EAAG,OAAQ,EAAG,UAAW,CAAE,EAE1DC,EAAM,IAAIC,IAAQ,CACtBJ,EAAI,IAAI,WAAaI,EAAI,KAAK,GAAG,EAAI;AAAA,EACrC,QAAQ,IAAI,GAAGA,CAAG,CACpB,EACMC,EAAUD,GAAQJ,EAAI,IAAI,UAAYI,EACtCE,EAAQF,GAAQJ,EAAI,KAAK,UAAY,WAAcD,EAAM,GAAG,OAAO,EAAE,WAAsB,SAAS,EAAI,mBAAqB,KAAK,UAAUK,CAAG,EAAE,QAAQ,SAAU,EAAE,EAAE,QAAQ,KAAM,KAAK,EAEhM,eAAeG,GAAS,CACtBF,EAAO,oBAAoB,EAE3B,IAAMG,EAAkC,CAAE,MAAO,GAAO,MAAO,CAAE,WAAY,OAAQ,WAAY,OAAQ,MAAO,CAAE,MAAO,SAAS,KAAK,WAAY,EAAG,OAAQ,CAAE,MAAO,SAAS,KAAK,YAAa,CAAE,CAAE,EAChMC,EAAsB,MAAM,UAAU,aAAa,aAAaD,CAAO,EACvEE,EAAQ,IAAI,QAASC,GAAY,CAAEX,EAAI,MAAM,aAAe,IAAMW,EAAQ,EAAI,CAAG,CAAC,EACxFX,EAAI,MAAM,UAAYS,EACjBT,EAAI,MAAM,KAAK,EACpB,MAAMU,EACNV,EAAI,OAAO,MAAQA,EAAI,MAAM,WAC7BA,EAAI,OAAO,OAASA,EAAI,MAAM,YAC9B,IAAMY,EAA0BH,EAAO,eAAe,EAAE,GAClDI,EAAgDD,EAAM,gBAAkBA,EAAM,gBAAgB,EAAI,GAClGE,EAAwCF,EAAM,YAAcA,EAAM,YAAY,EAAI,GAClFG,EAA8CH,EAAM,eAAiBA,EAAM,eAAe,EAAI,GACpGT,EAAI,SAAUH,EAAI,MAAM,WAAYA,EAAI,MAAM,YAAaY,EAAM,MAAO,CAAE,OAAAH,EAAQ,MAAAG,EAAO,SAAAE,EAAU,YAAAC,EAAa,aAAAF,CAAa,CAAC,EAC9Hb,EAAI,OAAO,QAAU,IAAM,CACrBA,EAAI,MAAM,OAAaA,EAAI,MAAM,KAAK,EACrCA,EAAI,MAAM,MAAM,CACvB,CACF,CAEA,eAAegB,GAAgB,CAC7B,GAAI,CAAChB,EAAI,MAAM,OAAQ,CACjBC,EAAU,QAAU,IAAGA,EAAU,MAAQF,EAAM,IAAI,GAEvD,MAAMA,EAAM,OAAOC,EAAI,KAAK,EAC5B,IAAMiB,EAAUlB,EAAM,GAAG,OAAO,EAAE,WAC9BkB,EAAUhB,EAAU,UAAY,GAAGE,EAAI,qBAAsBc,EAAUhB,EAAU,OAAO,EAC5FA,EAAU,QAAUgB,EACpBf,EAAI,UAAY,KAAK,MAAM,IAAO,KAAQH,EAAM,IAAI,EAAIE,EAAU,OAAO,EAAI,IAC7EC,EAAI,SACJA,EAAI,UAAY,KAAK,MAAM,KAAQH,EAAM,IAAI,EAAIE,EAAU,OAASC,EAAI,MAAM,EAAI,IAC9EA,EAAI,OAAS,MAAQ,GAAK,CAACF,EAAI,MAAM,QAAQG,EAAI,cAAe,CAAE,GAAGD,EAAK,QAASD,EAAU,OAAQ,CAAC,CAC5G,CACAA,EAAU,OAASF,EAAM,IAAI,EAC7B,sBAAsBiB,CAAa,CACrC,CAEA,eAAeE,GAAW,CACxB,GAAI,CAAClB,EAAI,MAAM,OAAQ,CACrB,IAAMmB,EAAepB,EAAM,KAAKA,EAAM,MAAM,EACxCA,EAAM,OAAO,OAAO,KAAMA,EAAM,KAAK,OAAOoB,EAAa,O
AA6BnB,EAAI,MAAM,EAC/FD,EAAM,KAAK,OAAOC,EAAI,MAAOA,EAAI,MAAM,EAC5C,MAAMD,EAAM,KAAK,IAAIC,EAAI,OAAQmB,CAAY,EAC7Cb,EAAKa,EAAa,WAAW,CAC/B,CACA,IAAMC,EAAMrB,EAAM,IAAI,EACtBG,EAAI,QAAU,KAAK,MAAM,IAAO,KAAQkB,EAAMnB,EAAU,KAAK,EAAI,IACjEA,EAAU,KAAOmB,EACjBf,EAAOL,EAAI,MAAM,OAAS,SAAW,QAAQE,EAAI,UAAU,QAAQ,CAAC,EAAE,SAAS,EAAG,GAAG,cAAcA,EAAI,QAAQ,QAAQ,CAAC,EAAE,SAAS,EAAG,GAAG,QAAQ,EACjJ,WAAWgB,EAAU,EAAE,CACzB,CAEA,eAAeG,GAAO,CACpBlB,EAAI,iBAAkBJ,EAAM,QAAS,kBAAmBA,EAAM,GAAG,QAAQ,YAAY,EACrFI,EAAI,YAAaJ,EAAM,IAAI,SAAU,WAAYA,EAAM,IAAI,KAAK,EAChEM,EAAO,YAAY,EACnB,MAAMN,EAAM,KAAK,EACjBI,EAAI,WAAYJ,EAAM,GAAG,WAAW,EAAG,eAAgBA,EAAM,IAAI,QAAQ,EACzEI,EAAI,gBAAiBJ,EAAM,cAAc,CAAC,EAC1CI,EAAI,iBAAkB,OAAO,OAAOJ,EAAM,MAAM,EAAE,OAAQuB,GAAUA,IAAU,IAAI,EAAE,MAAM,EAC1FjB,EAAO,iBAAiB,EACxB,MAAMN,EAAM,OAAO,EACnB,MAAMQ,EAAO,EACb,MAAMS,EAAc,EACpB,MAAME,EAAS,CACjB,CAEA,OAAO,OAASG", - "names": ["H", "humanConfig", "human", "dom", "timestamp", "fps", "log", "msg", "status", "perf", "webCam", "options", "stream", "ready", "resolve", "track", "capabilities", "settings", "constraints", "detectionLoop", "tensors", "drawLoop", "interpolated", "now", "main", "model"] + "sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary \n * @author \n * @copyright \n * @license MIT\n */\n\nimport * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human\n\nconst humanConfig: Partial = { // user configuration for human, used to fine-tune behavior\n // backend: 'wasm' as const,\n // wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.20.0/dist/',\n // cacheSensitivity: 0,\n // async: false,\n modelBasePath: '../../models',\n filter: { enabled: true, equalization: false, flip: false },\n face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },\n body: { enabled: true },\n hand: { enabled: true },\n object: { enabled: false },\n segmentation: { enabled: false },\n gesture: { enabled: true },\n};\n\nconst human = new H.Human(humanConfig); // create instance of human with overrides from user configuration\n\nhuman.env.perfadd = false; // is performance data showing instant or total values\nhuman.draw.options.font = 'small-caps 18px \"Lato\"'; // set font used to draw labels when using draw methods\nhuman.draw.options.lineHeight = 20;\n// human.draw.options.fillPolygons = true;\n\nconst dom = { // grab instances of dom objects so we dont have to look them up later\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('status') as HTMLPreElement,\n perf: document.getElementById('performance') as HTMLDivElement,\n};\nconst timestamp = { detect: 0, draw: 0, tensors: 0, start: 0 }; // holds information used to calculate performance and possible memory leaks\nconst fps = { detectFPS: 0, drawFPS: 0, frames: 0, averageMs: 0 }; // holds calculated fps information for both detect and screen refresh\n\nconst log = (...msg) => { // helper method to output messages\n dom.log.innerText += msg.join(' ') + '\\n';\n console.log(...msg); // eslint-disable-line no-console\n};\nconst status = (msg) => dom.fps.innerText = msg; // print status element\nconst perf = (msg) => dom.perf.innerText = 'tensors:' + (human.tf.memory().numTensors as number).toString() + ' | performance: ' + 
JSON.stringify(msg).replace(/\"|{|}/g, '').replace(/,/g, ' | '); // print performance element\n\nasync function detectionLoop() { // main detection loop\n if (!dom.video.paused) {\n if (timestamp.start === 0) timestamp.start = human.now();\n // log('profiling data:', await human.profile(dom.video));\n await human.detect(dom.video); // actual detection; were not capturing output in a local variable as it can also be reached via human.result\n const tensors = human.tf.memory().numTensors; // check current tensor usage for memory leaks\n if (tensors - timestamp.tensors !== 0) log('allocated tensors:', tensors - timestamp.tensors); // printed on start and each time there is a tensor leak\n timestamp.tensors = tensors;\n fps.detectFPS = Math.round(1000 * 1000 / (human.now() - timestamp.detect)) / 1000;\n fps.frames++;\n fps.averageMs = Math.round(1000 * (human.now() - timestamp.start) / fps.frames) / 1000;\n if (fps.frames % 100 === 0 && !dom.video.paused) log('performance', { ...fps, tensors: timestamp.tensors });\n }\n timestamp.detect = human.now();\n requestAnimationFrame(detectionLoop); // start new frame immediately\n}\n\nasync function drawLoop() { // main screen refresh loop\n if (!dom.video.paused) {\n const interpolated = human.next(human.result); // smoothen result using last-known results\n if (human.config.filter.flip) human.draw.canvas(interpolated.canvas as HTMLCanvasElement, dom.canvas); // draw processed image to screen canvas\n else human.draw.canvas(dom.video, dom.canvas); // draw original video to screen canvas // better than using procesed image as this loop happens faster than processing loop\n await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.\n perf(interpolated.performance); // write performance data\n }\n const now = human.now();\n fps.drawFPS = Math.round(1000 * 1000 / (now - timestamp.draw)) / 1000;\n timestamp.draw = now;\n status(dom.video.paused ? 
'paused' : `fps: ${fps.detectFPS.toFixed(1).padStart(5, ' ')} detect | ${fps.drawFPS.toFixed(1).padStart(5, ' ')} draw`); // write status\n setTimeout(drawLoop, 30); // use to slow down refresh from max refresh rate to target of 30 fps\n}\n\nasync function webCam() {\n await human.webcam.start({ element: dom.video, crop: true }); // use human webcam helper methods and associate webcam stream with a dom element\n dom.canvas.width = human.webcam.width;\n dom.canvas.height = human.webcam.height;\n dom.canvas.onclick = async () => { // pause when clicked on screen and resume on next click\n if (human.webcam.paused) await human.webcam.play();\n else human.webcam.pause();\n };\n}\n\nasync function main() { // main entry point\n log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);\n log('platform:', human.env.platform, '| agent:', human.env.agent);\n status('loading...');\n await human.load(); // preload all models\n log('backend:', human.tf.getBackend(), '| available:', human.env.backends);\n log('models stats:', human.getModelStats());\n log('models loaded:', Object.values(human.models).filter((model) => model !== null).length);\n status('initializing...');\n await human.warmup(); // warmup function to initialize backend for future faster detection\n await webCam(); // start webcam\n await detectionLoop(); // start detection loop\n await drawLoop(); // start draw loop\n}\n\nwindow.onload = main;\n"], + "mappings": ";;;;;;AASA,UAAYA,MAAO,0BAEnB,IAAMC,EAAiC,CAKrC,cAAe,eACf,OAAQ,CAAE,QAAS,GAAM,aAAc,GAAO,KAAM,EAAM,EAC1D,KAAM,CAAE,QAAS,GAAM,SAAU,CAAE,SAAU,EAAM,EAAG,KAAM,CAAE,QAAS,EAAK,EAAG,UAAW,CAAE,QAAS,EAAM,EAAG,KAAM,CAAE,QAAS,EAAK,EAAG,YAAa,CAAE,QAAS,EAAK,EAAG,QAAS,CAAE,QAAS,EAAK,CAAE,EAClM,KAAM,CAAE,QAAS,EAAK,EACtB,KAAM,CAAE,QAAS,EAAK,EACtB,OAAQ,CAAE,QAAS,EAAM,EACzB,aAAc,CAAE,QAAS,EAAM,EAC/B,QAAS,CAAE,QAAS,EAAK,CAC3B,EAEMC,EAAQ,IAAM,QAAMD,CAAW,EAErCC,EAAM,IAAI,QAAU,GACpBA,EAAM,KAAK,QAAQ,KAAO,yBAC1BA,EAAM,KAAK,QAAQ,WAAa,GAGhC,IAAMC,EAAM,CACV,MAAO,SAAS,eAAe,OAAO,EACtC,OAAQ,SAAS,eAAe,QAAQ,EACxC,IAAK,SAAS,eAAe,KAAK,EAClC,IAAK,SAAS,eAAe,QAAQ,EACrC,KAAM,SAAS,eAAe,aAAa,CAC7C,EACMC,EAAY,CAAE,OAAQ,EAAG,KAAM,EAAG,QAAS,EAAG,MAAO,CAAE,EACvDC,EAAM,CAAE,UAAW,EAAG,QAAS,EAAG,OAAQ,EAAG,UAAW,CAAE,EAE1DC,EAAM,IAAIC,IAAQ,CACtBJ,EAAI,IAAI,WAAaI,EAAI,KAAK,GAAG,EAAI;AAAA,EACrC,QAAQ,IAAI,GAAGA,CAAG,CACpB,EACMC,EAAUD,GAAQJ,EAAI,IAAI,UAAYI,EACtCE,EAAQF,GAAQJ,EAAI,KAAK,UAAY,WAAcD,EAAM,GAAG,OAAO,EAAE,WAAsB,SAAS,EAAI,mBAAqB,KAAK,UAAUK,CAAG,EAAE,QAAQ,SAAU,EAAE,EAAE,QAAQ,KAAM,KAAK,EAEhM,eAAeG,GAAgB,CAC7B,GAAI,CAACP,EAAI,MAAM,OAAQ,CACjBC,EAAU,QAAU,IAAGA,EAAU,MAAQF,EAAM,IAAI,GAEvD,MAAMA,EAAM,OAAOC,EAAI,KAAK,EAC5B,IAAMQ,EAAUT,EAAM,GAAG,OAAO,EAAE,WAC9BS,EAAUP,EAAU,UAAY,GAAGE,EAAI,qBAAsBK,EAAUP,EAAU,OAAO,EAC5FA,EAAU,QAAUO,EACpBN,EAAI,UAAY,KAAK,MAAM,IAAO,KAAQH,EAAM,IAAI,EAAIE,EAAU,OAAO,EAAI,IAC7EC,EAAI,SACJA,EAAI,UAAY,KAAK,MAAM,KAAQH,EAAM,IAAI,EAAIE,EAAU,OAASC,EAAI,MAAM,EAAI,IAC9EA,EAAI,OAAS,MAAQ,GAAK,CAACF,EAAI,MAAM,QAAQG,EAAI,cAAe,CAAE,GAAGD,EAAK,QAASD,EAAU,OAAQ,CAAC,CAC5G,CACAA,EAAU,OAASF,EAAM,IAAI,EAC7B,sBAAsBQ,CAAa,CACrC,CAEA,eAAeE,GAAW,CACxB,GAAI,CAACT,EAAI,MAAM,OAAQ,CACrB,IAAMU,EAAeX,EAAM,KAAKA,EAAM,MAAM,EACxCA,EAAM,OAAO,OAAO,KAAMA,EAAM,KAAK,OAAOW,EAAa,OAA6BV,EAAI,MAAM,EAC/FD,EAAM,KAAK,OAAOC,EAAI,MAAOA,EAAI,MAAM,EAC5C,MAAMD,EAAM,KAAK,IAAIC,EAAI,OAAQU,CAAY,EAC7CJ,EAAKI,EAAa,WAAW,CAC/B,CACA,IAAMC,EAAMZ,EAAM,IAAI,EACtBG,EAAI,QAAU,KAAK,MAAM,IAAO,KAAQS,EAAMV,EAAU,KAAK,EAAI,IACjEA,EAAU,KAAOU,EACjBN,EAAOL,EAAI,MAAM,OAAS,SAAW,QAAQE,EAAI,UAAU,QAAQ,CAAC,EAAE,SAAS,EAAG,GAAG,cAAcA,EAAI,QAAQ,QAAQ,CAAC,EA
AE,SAAS,EAAG,GAAG,QAAQ,EACjJ,WAAWO,EAAU,EAAE,CACzB,CAEA,eAAeG,GAAS,CACtB,MAAMb,EAAM,OAAO,MAAM,CAAE,QAASC,EAAI,MAAO,KAAM,EAAK,CAAC,EAC3DA,EAAI,OAAO,MAAQD,EAAM,OAAO,MAChCC,EAAI,OAAO,OAASD,EAAM,OAAO,OACjCC,EAAI,OAAO,QAAU,SAAY,CAC3BD,EAAM,OAAO,OAAQ,MAAMA,EAAM,OAAO,KAAK,EAC5CA,EAAM,OAAO,MAAM,CAC1B,CACF,CAEA,eAAec,GAAO,CACpBV,EAAI,iBAAkBJ,EAAM,QAAS,kBAAmBA,EAAM,GAAG,QAAQ,YAAY,EACrFI,EAAI,YAAaJ,EAAM,IAAI,SAAU,WAAYA,EAAM,IAAI,KAAK,EAChEM,EAAO,YAAY,EACnB,MAAMN,EAAM,KAAK,EACjBI,EAAI,WAAYJ,EAAM,GAAG,WAAW,EAAG,eAAgBA,EAAM,IAAI,QAAQ,EACzEI,EAAI,gBAAiBJ,EAAM,cAAc,CAAC,EAC1CI,EAAI,iBAAkB,OAAO,OAAOJ,EAAM,MAAM,EAAE,OAAQe,GAAUA,IAAU,IAAI,EAAE,MAAM,EAC1FT,EAAO,iBAAiB,EACxB,MAAMN,EAAM,OAAO,EACnB,MAAMa,EAAO,EACb,MAAML,EAAc,EACpB,MAAME,EAAS,CACjB,CAEA,OAAO,OAASI", + "names": ["H", "humanConfig", "human", "dom", "timestamp", "fps", "log", "msg", "status", "perf", "detectionLoop", "tensors", "drawLoop", "interpolated", "now", "webCam", "main", "model"] } diff --git a/demo/typescript/index.ts b/demo/typescript/index.ts index 5733227a..b35bc198 100644 --- a/demo/typescript/index.ts +++ b/demo/typescript/index.ts @@ -13,13 +13,14 @@ const humanConfig: Partial = { // user configuration for human, used t // backend: 'wasm' as const, // wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.20.0/dist/', // cacheSensitivity: 0, - async: false, + // async: false, modelBasePath: '../../models', filter: { enabled: true, equalization: false, flip: false }, face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } }, body: { enabled: true }, hand: { enabled: true }, object: { enabled: false }, + segmentation: { enabled: false }, gesture: { enabled: true }, }; @@ -47,28 +48,6 @@ const log = (...msg) => { // helper method to output messages const status = (msg) => dom.fps.innerText = msg; // print status element const perf = (msg) => dom.perf.innerText = 'tensors:' + (human.tf.memory().numTensors as number).toString() + ' | performance: ' + JSON.stringify(msg).replace(/"|{|}/g, '').replace(/,/g, ' | '); // print performance element -async function webCam() { // initialize webcam - status('starting webcam...'); - // @ts-ignore resizeMode is not yet defined in tslib - const options: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth }, height: { ideal: document.body.clientHeight } } }; - const stream: MediaStream = await navigator.mediaDevices.getUserMedia(options); - const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); }); - dom.video.srcObject = stream; - void dom.video.play(); - await ready; - dom.canvas.width = dom.video.videoWidth; - dom.canvas.height = dom.video.videoHeight; - const track: MediaStreamTrack = stream.getVideoTracks()[0]; - const capabilities: MediaTrackCapabilities | string = track.getCapabilities ? track.getCapabilities() : ''; - const settings: MediaTrackSettings | string = track.getSettings ? track.getSettings() : ''; - const constraints: MediaTrackConstraints | string = track.getConstraints ? 
track.getConstraints() : ''; - log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities }); - dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click - if (dom.video.paused) void dom.video.play(); - else dom.video.pause(); - }; -} - async function detectionLoop() { // main detection loop if (!dom.video.paused) { if (timestamp.start === 0) timestamp.start = human.now(); @@ -101,6 +80,16 @@ async function drawLoop() { // main screen refresh loop setTimeout(drawLoop, 30); // use to slow down refresh from max refresh rate to target of 30 fps } +async function webCam() { + await human.webcam.start({ element: dom.video, crop: true }); // use human webcam helper methods and associate webcam stream with a dom element + dom.canvas.width = human.webcam.width; + dom.canvas.height = human.webcam.height; + dom.canvas.onclick = async () => { // pause when clicked on screen and resume on next click + if (human.webcam.paused) await human.webcam.play(); + else human.webcam.pause(); + }; +} + async function main() { // main entry point log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']); log('platform:', human.env.platform, '| agent:', human.env.agent); diff --git a/demo/video/index.html b/demo/video/index.html index 54744b9c..073bcd93 100644 --- a/demo/video/index.html +++ b/demo/video/index.html @@ -5,7 +5,7 @@ Human - +
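Taken together, the demo changes in this patch follow a two-loop pattern: detection runs as fast as processing allows via `requestAnimationFrame`, while drawing is throttled to roughly 30 fps and works from temporally interpolated results. A sketch of that pattern, assuming the same `human`, `canvas`, and `initWebCam` as in the earlier sketch:

```js
async function detectionLoop() { // run detection continuously; results are kept in human.result
  if (!human.webcam.paused) await human.detect(human.webcam.element);
  requestAnimationFrame(detectionLoop); // schedule next detection immediately
}

async function drawLoop() { // refresh screen at ~30 fps independent of detection speed
  if (!human.webcam.paused) {
    const interpolated = human.next(human.result); // smooth last-known results
    human.draw.canvas(human.webcam.element, canvas); // draw current webcam frame
    await human.draw.all(canvas, interpolated); // overlay labels, boxes and lines
  }
  setTimeout(drawLoop, 30); // throttle draw loop to a target of ~30 fps
}

async function main() {
  await human.load(); // preload all models
  await human.warmup(); // initialize backend
  await initWebCam(); // start webcam (sketch above)
  detectionLoop(); // start detection loop
  drawLoop(); // start draw loop
}

window.onload = main;
```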