mirror of https://github.com/vladmandic/human
add human.webcam methods
parent 1ffad0ee1a
commit ec53f70128
CHANGELOG.md

@@ -1,6 +1,6 @@
 # @vladmandic/human

-Version: **2.10.3**
+Version: **2.11.0**

 Description: **Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition**

 Author: **Vladimir Mandic <mandic00@live.com>**

@@ -9,7 +9,7 @@
 ## Changelog

-### **HEAD -> main** 2022/09/25 mandic00@live.com
+### **HEAD -> main** 2022/09/27 mandic00@live.com

 - create funding.yml
 - fix rotation interpolation

README.md (24 lines changed)
@@ -22,6 +22,7 @@ JavaScript module using TensorFlow/JS Machine Learning library
 - Detection of frame changes to trigger only required models for improved performance
 - Intelligent temporal interpolation to provide smooth results regardless of processing performance
 - Simple unified API
+- Built-in Image, Video and WebCam handling

 <br>
@@ -94,7 +95,8 @@ JavaScript module using TensorFlow/JS Machine Learning library
 - [**Code Repository**](https://github.com/vladmandic/human)
 - [**NPM Package**](https://www.npmjs.com/package/@vladmandic/human)
 - [**Issues Tracker**](https://github.com/vladmandic/human/issues)
-- [**TypeDoc API Specification**](https://vladmandic.github.io/human/typedoc/classes/Human.html)
+- [**TypeDoc API Specification - Main class**](https://vladmandic.github.io/human/typedoc/classes/Human.html)
+- [**TypeDoc API Specification - Full**](https://vladmandic.github.io/human/typedoc/)
 - [**Change Log**](https://github.com/vladmandic/human/blob/main/CHANGELOG.md)
 - [**Current To-do List**](https://github.com/vladmandic/human/blob/main/TODO.md)
@@ -346,13 +348,31 @@
 const outputCanvas = document.getElementById('canvas-id');

 async function drawResults() {
   const interpolated = human.next(); // get smoothed result using last-known results
   human.draw.all(outputCanvas, interpolated); // draw the frame
-  requestAnimationFrame(drawVideo); // run draw loop
+  requestAnimationFrame(drawResults); // run draw loop
 }

 human.video(inputVideo); // start detection loop which continuously updates results
 drawResults(); // start draw loop
 ```
+
+or using built-in webcam helper methods that take care of video handling completely:
+
+```js
+const human = new Human(); // create instance of Human
+const outputCanvas = document.getElementById('canvas-id');
+
+async function drawResults() {
+  const interpolated = human.next(); // get smoothed result using last-known results
+  human.draw.canvas(outputCanvas, human.webcam.element); // draw current webcam frame
+  human.draw.all(outputCanvas, interpolated); // draw the frame detection results
+  requestAnimationFrame(drawResults); // run draw loop
+}
+
+await human.webcam.start({ crop: true });
+human.video(human.webcam.element); // start detection loop which continuously updates results
+drawResults(); // start draw loop
+```
+
+And for even better results, you can run detection in a separate web worker thread.
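For illustration only, beyond what this commit shows: a minimal sketch of that web-worker pattern, assuming a separately bundled worker script (`human-worker.js` is a hypothetical name) running its own `Human` instance; `ImageBitmap` is a supported `Input` type, so captured frames can be transferred to the worker without copying.

```ts
// main thread: capture the current webcam frame and hand it to the worker (illustrative sketch)
const worker = new Worker('human-worker.js', { type: 'module' }); // hypothetical bundled worker script
worker.onmessage = (msg) => { /* detection results arrive here; interpolation and drawing stay on the main thread */ };
const frame: ImageBitmap = await createImageBitmap(human.webcam.element as HTMLVideoElement); // assumes human.webcam.start() already ran
worker.postMessage({ frame }, [frame]); // transfer ownership of the frame instead of copying it

// worker thread (human-worker.js): run detection off the main thread
// import { Human } from '@vladmandic/human';
// const human = new Human({ modelBasePath: '../models' });
// onmessage = async (evt) => postMessage(await human.detect(evt.data.frame));
```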
 <br><hr><br>
TODO.md (8 lines changed)
@@ -44,13 +44,17 @@ Enable via `about:config` -> `gfx.offscreencanvas.enabled`

 ## Pending Release Changes

-- New API [`human.video()`](https://vladmandic.github.io/human/typedoc/classes/Human.html#video)
-  Runs continous detection of an input video instead of processing each frame manually
+- New methods [`human.webcam.*`](https://vladmandic.github.io/human/typedoc/classes/WebCam.html)
+  Directly configures and controls WebCam streams
+- New method [`human.video()`](https://vladmandic.github.io/human/typedoc/classes/Human.html#video)
+  Runs continuous detection of an input video instead of processing each frame manually using `human.detect()`
 - New simple demo [*Live*](https://vladmandic.github.io/human/demo/video/index.html)
   Full HTML and JavaScript code in less than a screen
+- New advanced demo using BabylonJS <https://vladmandic.github.io/human-bjs-vrm>
 - Enable model cache when using web workers
 - Fix for `face.rotation` interpolation
 - Improve NodeJS resolver when using ESM
 - Update demo `demo/typescript`
 - Update demo `demo/faceid`
 - Update demo `demo/nodejs/process-folder.js`
   and re-process `/samples`
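To make the `human.video()` entry above concrete, an illustrative comparison (not part of the diff) of manual per-frame processing versus the new continuous detection loop; the `video` element id is an assumption:

```ts
const video = document.getElementById('video') as HTMLVideoElement;

// manual approach: call human.detect() once per frame and schedule the next frame yourself
async function detectLoop() {
  await human.detect(video); // process a single frame
  requestAnimationFrame(detectLoop); // schedule the next frame manually
}

// new approach: human.video() runs the detection loop internally and keeps results updated
human.video(video);
const interpolated = human.next(); // smoothed last-known results, readable at any time
```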
demo/typescript/index.js (minified build output)

@@ -4,6 +4,6 @@
 author: <https://github.com/vladmandic>'
 */
import*as c from"../../dist/human.esm.js";var w={async:!1,modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},gesture:{enabled:!0}},e=new c.Human(w);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var t={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},o={detectFPS:0,drawFPS:0,frames:0,averageMs:0},i=(...a)=>{t.log.innerText+=a.join(" ")+`
`,console.log(...a)},r=a=>t.fps.innerText=a,b=a=>t.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(a).replace(/"|{|}/g,"").replace(/,/g," | ");async function h(){r("starting webcam...");let a={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth},height:{ideal:document.body.clientHeight}}},d=await navigator.mediaDevices.getUserMedia(a),f=new Promise(p=>{t.video.onloadeddata=()=>p(!0)});t.video.srcObject=d,t.video.play(),await f,t.canvas.width=t.video.videoWidth,t.canvas.height=t.video.videoHeight;let s=d.getVideoTracks()[0],v=s.getCapabilities?s.getCapabilities():"",g=s.getSettings?s.getSettings():"",u=s.getConstraints?s.getConstraints():"";i("video:",t.video.videoWidth,t.video.videoHeight,s.label,{stream:d,track:s,settings:g,constraints:u,capabilities:v}),t.canvas.onclick=()=>{t.video.paused?t.video.play():t.video.pause()}}async function l(){if(!t.video.paused){n.start===0&&(n.start=e.now()),await e.detect(t.video);let a=e.tf.memory().numTensors;a-n.tensors!==0&&i("allocated tensors:",a-n.tensors),n.tensors=a,o.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,o.frames++,o.averageMs=Math.round(1e3*(e.now()-n.start)/o.frames)/1e3,o.frames%100===0&&!t.video.paused&&i("performance",{...o,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(l)}async function m(){if(!t.video.paused){let d=e.next(e.result);e.config.filter.flip?e.draw.canvas(d.canvas,t.canvas):e.draw.canvas(t.video,t.canvas),await e.draw.all(t.canvas,d),b(d.performance)}let a=e.now();o.drawFPS=Math.round(1e3*1e3/(a-n.draw))/1e3,n.draw=a,r(t.video.paused?"paused":`fps: ${o.detectFPS.toFixed(1).padStart(5," ")} detect | ${o.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(m,30)}async function M(){i("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),i("platform:",e.env.platform,"| agent:",e.env.agent),r("loading..."),await e.load(),i("backend:",e.tf.getBackend(),"| available:",e.env.backends),i("models stats:",e.getModelStats()),i("models loaded:",Object.values(e.models).filter(a=>a!==null).length),r("initializing..."),await e.warmup(),await h(),await l(),await m()}window.onload=M;
import*as i from"../../dist/human.esm.js";var m={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},segmentation:{enabled:!1},gesture:{enabled:!0}},e=new i.Human(m);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var a={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},s={detectFPS:0,drawFPS:0,frames:0,averageMs:0},o=(...t)=>{a.log.innerText+=t.join(" ")+`
`,console.log(...t)},d=t=>a.fps.innerText=t,f=t=>a.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(t).replace(/"|{|}/g,"").replace(/,/g," | ");async function l(){if(!a.video.paused){n.start===0&&(n.start=e.now()),await e.detect(a.video);let t=e.tf.memory().numTensors;t-n.tensors!==0&&o("allocated tensors:",t-n.tensors),n.tensors=t,s.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,s.frames++,s.averageMs=Math.round(1e3*(e.now()-n.start)/s.frames)/1e3,s.frames%100===0&&!a.video.paused&&o("performance",{...s,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(l)}async function c(){if(!a.video.paused){let r=e.next(e.result);e.config.filter.flip?e.draw.canvas(r.canvas,a.canvas):e.draw.canvas(a.video,a.canvas),await e.draw.all(a.canvas,r),f(r.performance)}let t=e.now();s.drawFPS=Math.round(1e3*1e3/(t-n.draw))/1e3,n.draw=t,d(a.video.paused?"paused":`fps: ${s.detectFPS.toFixed(1).padStart(5," ")} detect | ${s.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(c,30)}async function u(){await e.webcam.start({element:a.video,crop:!0}),a.canvas.width=e.webcam.width,a.canvas.height=e.webcam.height,a.canvas.onclick=async()=>{e.webcam.paused?await e.webcam.play():e.webcam.pause()}}async function w(){o("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),o("platform:",e.env.platform,"| agent:",e.env.agent),d("loading..."),await e.load(),o("backend:",e.tf.getBackend(),"| available:",e.env.backends),o("models stats:",e.getModelStats()),o("models loaded:",Object.values(e.models).filter(t=>t!==null).length),d("initializing..."),await e.warmup(),await u(),await l(),await c()}window.onload=w;
//# sourceMappingURL=index.js.map
File diff suppressed because one or more lines are too long
demo/typescript/index.ts

@@ -13,13 +13,14 @@ const humanConfig: Partial<H.Config> = { // user configuration for human, used to fine-tune behavior
   // backend: 'wasm' as const,
   // wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.20.0/dist/',
   // cacheSensitivity: 0,
-  async: false,
+  // async: false,
   modelBasePath: '../../models',
   filter: { enabled: true, equalization: false, flip: false },
   face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },
   body: { enabled: true },
   hand: { enabled: true },
   object: { enabled: false },
+  segmentation: { enabled: false },
   gesture: { enabled: true },
 };
@@ -47,28 +48,6 @@ const log = (...msg) => { // helper method to output messages
 const status = (msg) => dom.fps.innerText = msg; // print status element
 const perf = (msg) => dom.perf.innerText = 'tensors:' + (human.tf.memory().numTensors as number).toString() + ' | performance: ' + JSON.stringify(msg).replace(/"|{|}/g, '').replace(/,/g, ' | '); // print performance element

-async function webCam() { // initialize webcam
-  status('starting webcam...');
-  // @ts-ignore resizeMode is not yet defined in tslib
-  const options: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth }, height: { ideal: document.body.clientHeight } } };
-  const stream: MediaStream = await navigator.mediaDevices.getUserMedia(options);
-  const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });
-  dom.video.srcObject = stream;
-  void dom.video.play();
-  await ready;
-  dom.canvas.width = dom.video.videoWidth;
-  dom.canvas.height = dom.video.videoHeight;
-  const track: MediaStreamTrack = stream.getVideoTracks()[0];
-  const capabilities: MediaTrackCapabilities | string = track.getCapabilities ? track.getCapabilities() : '';
-  const settings: MediaTrackSettings | string = track.getSettings ? track.getSettings() : '';
-  const constraints: MediaTrackConstraints | string = track.getConstraints ? track.getConstraints() : '';
-  log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });
-  dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click
-    if (dom.video.paused) void dom.video.play();
-    else dom.video.pause();
-  };
-}

 async function detectionLoop() { // main detection loop
   if (!dom.video.paused) {
     if (timestamp.start === 0) timestamp.start = human.now();
@@ -101,6 +80,16 @@ async function drawLoop() { // main screen refresh loop
   setTimeout(drawLoop, 30); // use to slow down refresh from max refresh rate to target of 30 fps
 }

+async function webCam() {
+  await human.webcam.start({ element: dom.video, crop: true }); // use human webcam helper methods and associate webcam stream with a dom element
+  dom.canvas.width = human.webcam.width;
+  dom.canvas.height = human.webcam.height;
+  dom.canvas.onclick = async () => { // pause when clicked on screen and resume on next click
+    if (human.webcam.paused) await human.webcam.play();
+    else human.webcam.pause();
+  };
+}
+
 async function main() { // main entry point
   log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);
   log('platform:', human.env.platform, '| agent:', human.env.agent);
demo/video/index.html

@@ -5,7 +5,7 @@
   <title>Human</title>
   <meta name="viewport" content="width=device-width" id="viewport">
   <meta name="keywords" content="Human">
-  <meta name="description" content="Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction, Emotion Prediction & Gesture Recognition; Author: Vladimir Mandic <https://github.com/vladmandic>">
+  <meta name="description" content="Human: Demo; Author: Vladimir Mandic <https://github.com/vladmandic>">
   <link rel="manifest" href="../manifest.webmanifest">
   <link rel="shortcut icon" href="../../favicon.ico" type="image/x-icon">
   <style>
@@ -15,50 +15,40 @@
   </head>
   <body>
     <canvas id="canvas" style="margin: 0 auto; width: 100%"></canvas>
     <video id="video" playsinline style="display: none"></video>
-    <pre id="log" style="padding: 8px"></pre>
+    <pre id="log" style="padding: 8px; position: fixed; bottom: 0"></pre>
     <script type="module">
       import * as H from '../../dist/human.esm.js'; // equivalent of import @vladmandic/Human

       const humanConfig = { // user configuration for human, used to fine-tune behavior
-        modelBasePath: '../../models',
-        filter: { enabled: true, equalization: false, flip: false },
+        modelBasePath: '../../models', // models can be loaded directly from cdn as well
+        filter: { enabled: true, equalization: true, flip: false },
         face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },
         body: { enabled: true },
         hand: { enabled: true },
-        object: { enabled: false },
-        gesture: { enabled: true },
+        object: { enabled: false },
+        segmentation: { enabled: false },
       };
       const human = new H.Human(humanConfig); // create instance of human with overrides from user configuration
-      const video = document.getElementById('video');
-      const canvas = document.getElementById('canvas');
-
-      async function webCam() { // initialize webcam
-        const options = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth }, height: { ideal: document.body.clientHeight } } }; // set ideal webcam video properties
-        const stream = await navigator.mediaDevices.getUserMedia(options); // find webcam stream that best matches what we want
-        const videoReady = new Promise((resolve) => { video.onloadeddata = () => resolve(true); });
-        video.srcObject = stream; // assign webcam stream to a video element
-        video.play(); // start webcam
-        await videoReady; // wait for video ready
-        canvas.width = video.videoWidth; // set canvas resolution to input webcam native resolution
-        canvas.height = video.videoHeight;
-        canvas.onclick = () => { // pause when clicked on screen and resume on next click
-          if (video.paused) void video.play();
-          else video.pause();
-        };
-      }
+      const canvas = document.getElementById('canvas'); // output canvas to draw both webcam and detection results

       async function drawLoop() { // main screen refresh loop
-        human.draw.canvas(video, canvas); // draw original video to screen canvas // better than using procesed image as this loop happens faster than processing loop
-        const interpolated = human.next(); // get smoothened result using last-known results
+        const interpolated = human.next(); // get smoothed result using last-known results which are continuously updated based on input webcam video
+        human.draw.canvas(human.webcam.element, canvas); // draw webcam video to screen canvas // better than using processed image as this loop happens faster than processing loop
         await human.draw.all(canvas, interpolated); // draw labels, boxes, lines, etc.
-        setTimeout(drawLoop, 30); // use to slow down refresh from max refresh rate to target of 30 fps
+        setTimeout(drawLoop, 30); // use to slow down refresh from max refresh rate to target of 1000/30 ~ 30 fps
       }

       async function main() { // main entry point
-        document.getElementById('log').innerHTML = `human version: ${human.version} | tfjs version: ${human.tf.version['tfjs-core']}<br>platform: ${human.env.platform} | agent ${human.env.agent}<br>`;
-        await webCam(); // start webcam
-        human.video(video); // instruct human to detect video frames
+        document.getElementById('log').innerHTML = `human version: ${human.version} | tfjs version: ${human.tf.version['tfjs-core']}<br>platform: ${human.env.platform} | agent ${human.env.agent}`;
+        await human.webcam.start({ crop: true }); // find webcam and start it
+        human.video(human.webcam.element); // instruct human to continuously detect video frames
+        canvas.width = human.webcam.width; // set canvas resolution to input webcam native resolution
+        canvas.height = human.webcam.height;
+        canvas.onclick = async () => { // pause when clicked on screen and resume on next click
+          if (human.webcam.paused) await human.webcam.play();
+          else human.webcam.pause();
+        };
         await drawLoop(); // start draw loop
       }
package.json (12 lines changed)
@@ -1,6 +1,6 @@
 {
   "name": "@vladmandic/human",
-  "version": "2.10.3",
+  "version": "2.11.0",
   "description": "Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition",
   "sideEffects": false,
   "main": "dist/human.node.js",
@@ -66,9 +66,9 @@
     "tensorflow"
   ],
   "devDependencies": {
-    "@html-eslint/eslint-plugin": "^0.14.1",
-    "@html-eslint/parser": "^0.14.0",
-    "@microsoft/api-extractor": "^7.31.2",
+    "@html-eslint/eslint-plugin": "^0.15.0",
+    "@html-eslint/parser": "^0.15.0",
+    "@microsoft/api-extractor": "^7.32.0",
     "@tensorflow/tfjs": "^3.20.0",
     "@tensorflow/tfjs-backend-cpu": "^3.20.0",
     "@tensorflow/tfjs-backend-wasm": "^3.20.0",
@@ -90,7 +90,7 @@
     "@vladmandic/tfjs": "github:vladmandic/tfjs",
     "@webgpu/types": "^0.1.21",
     "canvas": "^2.10.1",
-    "esbuild": "^0.15.9",
+    "esbuild": "^0.15.10",
     "eslint": "8.24.0",
     "eslint-config-airbnb-base": "^15.0.0",
     "eslint-plugin-html": "^7.1.0",
@@ -103,6 +103,6 @@
     "seedrandom": "^3.0.5",
     "tslib": "^2.4.0",
     "typedoc": "0.23.15",
-    "typescript": "4.8.3"
+    "typescript": "4.8.4"
   }
 }
src/config.ts

@@ -236,7 +236,7 @@ export interface Config {

   /** Force WASM loader to use platform fetch
    *
-   * default: auto-detects to link to CDN `jsdelivr` when running in browser
+   * default: false
    */
   wasmPlatformFetch: boolean,
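For illustration (not part of this diff), a configuration sketch combining this flag with the wasm backend options that appear commented out in `demo/typescript/index.ts`:

```ts
// a minimal sketch: opt in to platform fetch for the wasm backend
const human = new Human({
  backend: 'wasm',
  wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.20.0/dist/', // same CDN path as in the demo comments
  wasmPlatformFetch: true, // force the WASM loader to use platform fetch instead of the default loader
});
```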
src/exports.ts

@@ -16,6 +16,7 @@ export type { Models } from './models';
 export type { Env } from './util/env';
 export type { FaceGesture, BodyGesture, HandGesture, IrisGesture } from './gesture/gesture';
 export type { Emotion, Finger, FingerCurl, FingerDirection, HandType, Gender, Race, FaceLandmark, BodyLandmark, BodyAnnotation, ObjectType } from './result';
+export type { WebCamConfig } from './util/webcam';
 export { env } from './util/env';

 /** Events dispatched by `human.events`

@@ -39,6 +40,8 @@ export type ImageObjects = ImageData | ImageBitmap
 export type ExternalCanvas = typeof env.Canvas;
 /** Defines all possible input types for **Human** detection */
 export type Input = Tensor | AnyCanvas | AnyImage | AnyVideo | ImageObjects | ExternalCanvas;
+/** WebCam helper class */
+export type { WebCam } from './util/webcam';
 /** Defines model stats */
 export type { ModelStats } from './models';
 /** Defines individual model sizes */
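Consumer-side, the new type exports enable typed webcam configuration; an illustrative sketch (not part of the commit), assuming the package entry point re-exports the types as shown above and top-level await is available:

```ts
import { Human } from '@vladmandic/human';
import type { WebCamConfig } from '@vladmandic/human';

const webcamConfig: Partial<WebCamConfig> = { mode: 'front', crop: true, width: 640, height: 480 };
const human = new Human();
await human.webcam.start(webcamConfig); // start() accepts a Partial<WebCamConfig>
```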
src/human.ts

@@ -36,6 +36,8 @@ import * as persons from './util/persons';
 import * as posenet from './body/posenet';
 import * as segmentation from './segmentation/segmentation';
 import * as warmups from './warmup';
+import * as webcam from './util/webcam';

 // type definitions
 import type { Input, Tensor, DrawOptions, Config, Result, FaceResult, HandResult, BodyResult, ObjectResult, GestureResult, PersonResult, AnyCanvas, ModelStats } from './exports';
 // type exports

@@ -294,6 +296,11 @@ export class Human {
     image.reset();
   }

+  /** WebCam helper methods
+   *
+   */
+  public webcam = new webcam.WebCam();
+
   /** Load method preloads all configured models on-demand
    * - Not explicitly required as any required model is loaded implicitly on its first run
    *
src/util/webcam.ts (new file)

@@ -0,0 +1,193 @@
import { log } from './util';

// const log = (...msg) => console.log('webcam', ...msg); // eslint-disable-line no-console

/** WebCam configuration */
export interface WebCamConfig {
  /**
   * element can be:
   * - string which indicates dom element id
   * - actual HTMLVideo dom element
   * - undefined in which case a new HTMLVideoElement will be created
   */
  element: string | HTMLVideoElement | undefined,
  /** print messages on console */
  debug: boolean,
  /** use front or back camera */
  mode: 'front' | 'back',
  /** camera crop mode */
  crop: boolean,
  /** desired webcam width */
  width: number,
  /** desired webcam height */
  height: number,
}

export class WebCam { // eslint-disable-line @typescript-eslint/no-extraneous-class
  /** current webcam configuration */
  config: WebCamConfig;
  /** instance of dom element associated with webcam stream */
  element: HTMLVideoElement | undefined;
  /** active webcam stream */
  stream: MediaStream | undefined;

  constructor() {
    this.config = {
      element: undefined,
      debug: true,
      mode: 'front',
      crop: false,
      width: 0,
      height: 0,
    };
  }

  /** get active webcam stream track */
  public get track(): MediaStreamTrack | undefined {
    if (!this.stream) return undefined;
    return this.stream.getVideoTracks()[0];
  }

  /** get webcam capabilities */
  public get capabilities(): MediaTrackCapabilities | undefined {
    if (!this.track) return undefined;
    return this.track.getCapabilities ? this.track.getCapabilities() : undefined;
  }

  /** get webcam constraints */
  public get constraints(): MediaTrackConstraints | undefined {
    if (!this.track) return undefined;
    return this.track.getConstraints ? this.track.getConstraints() : undefined;
  }

  /** get webcam settings */
  public get settings(): MediaTrackSettings | undefined {
    if (!this.stream) return undefined;
    const track: MediaStreamTrack = this.stream.getVideoTracks()[0];
    return track.getSettings ? track.getSettings() : undefined;
  }

  /** get webcam label */
  public get label(): string {
    if (!this.track) return '';
    return this.track.label;
  }

  /** is webcam paused */
  public get paused(): boolean {
    return this.element?.paused || false;
  }

  /** webcam current width */
  public get width(): number {
    return this.element?.videoWidth || 0;
  }

  /** webcam current height */
  public get height(): number {
    return this.element?.videoHeight || 0;
  }

  /** start method initializes webcam stream and associates it with a dom video element */
  public start = async (webcamConfig?: Partial<WebCamConfig>): Promise<void> => {
    // set config
    if (webcamConfig?.debug) this.config.debug = webcamConfig?.debug;
    if (webcamConfig?.crop) this.config.crop = webcamConfig?.crop;
    if (webcamConfig?.mode) this.config.mode = webcamConfig?.mode;
    if (webcamConfig?.width) this.config.width = webcamConfig?.width;
    if (webcamConfig?.height) this.config.height = webcamConfig?.height;

    // use or create dom element
    if (webcamConfig?.element) {
      if (typeof webcamConfig.element === 'string') {
        const el = document.getElementById(webcamConfig.element);
        if (el && el instanceof HTMLVideoElement) {
          this.element = el;
        } else {
          if (this.config.debug) log('webcam', 'cannot get dom element', webcamConfig.element);
          return;
        }
      } else if (webcamConfig.element instanceof HTMLVideoElement) {
        this.element = webcamConfig.element;
      } else {
        if (this.config.debug) log('webcam', 'unknown dom element', webcamConfig.element);
        return;
      }
    } else {
      this.element = document.createElement('video');
    }

    // set constraints to use
    const requestedConstraints: DisplayMediaStreamConstraints = {
      audio: false,
      video: {
        facingMode: this.config.mode === 'front' ? 'user' : 'environment',
        // @ts-ignore // resizeMode is still not defined in tslib
        resizeMode: this.config.crop ? 'crop-and-scale' : 'none',
        width: { ideal: this.config.width > 0 ? this.config.width : window.innerWidth },
        height: { ideal: this.config.height > 0 ? this.config.height : window.innerHeight },
      },
    };

    // set default event listeners
    this.element.addEventListener('play', () => { if (this.config.debug) log('webcam', 'play'); });
    this.element.addEventListener('pause', () => { if (this.config.debug) log('webcam', 'pause'); });
    this.element.addEventListener('click', async () => { // pause when clicked on screen and resume on next click
      if (!this.element || !this.stream) return;
      if (this.element.paused) await this.element.play();
      else this.element.pause();
    });

    // get webcam and set it to run in dom element
    if (!navigator?.mediaDevices) {
      if (this.config.debug) log('webcam', 'no devices');
      return;
    }
    try {
      this.stream = await navigator.mediaDevices.getUserMedia(requestedConstraints); // get stream that satisfies constraints
    } catch (err) {
      log('webcam', err);
      return;
    }
    if (!this.stream) {
      if (this.config.debug) log('webcam', 'no stream');
      return;
    }
    this.element.srcObject = this.stream; // assign it to dom element
    const ready = new Promise((resolve) => { // wait until stream is ready
      if (!this.element) resolve(false);
      else this.element.onloadeddata = () => resolve(true);
    });
    await ready;
    await this.element.play(); // start playing

    if (this.config.debug) {
      log('webcam', {
        width: this.width,
        height: this.height,
        label: this.label,
        stream: this.stream,
        track: this.track,
        settings: this.settings,
        constraints: this.constraints,
        capabilities: this.capabilities,
      });
    }
  };

  /** pause webcam video method */
  public pause = (): void => {
    if (this.element) this.element.pause();
  };

  /** play webcam video method */
  public play = async (): Promise<void> => {
    if (this.element) await this.element.play();
  };

  /** stop method stops active webcam stream track and disconnects webcam */
  public stop = (): void => {
    if (this.config.debug) log('webcam', 'stop');
    if (this.track) this.track.stop();
  };
}
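Taken together, the class above can be driven entirely through the `human.webcam` instance; an illustrative sketch (not part of the file), using only the API defined in this diff, with a hypothetical `video-id` element:

```ts
const human = new Human();
await human.webcam.start({ element: 'video-id', mode: 'front', crop: true, debug: true }); // element may be a dom id, an HTMLVideoElement, or omitted to auto-create one
console.log(human.webcam.label, human.webcam.width, human.webcam.height); // getters read from the active stream track and video element
if (human.webcam.paused) await human.webcam.play(); // resume playback
human.webcam.pause(); // pause the video element without releasing the camera
human.webcam.stop(); // stop the underlying MediaStreamTrack and release the camera
```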
test/build.log

@@ -1,39 +1,40 @@
-2022-09-27 11:51:20 DATA: Build {"name":"@vladmandic/human","version":"2.10.3"}
-2022-09-27 11:51:20 INFO: Application: {"name":"@vladmandic/human","version":"2.10.3"}
-2022-09-27 11:51:20 INFO: Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
-2022-09-27 11:51:20 INFO: Toolchain: {"build":"0.7.14","esbuild":"0.15.9","typescript":"4.8.3","typedoc":"0.23.15","eslint":"8.24.0"}
-2022-09-27 11:51:20 INFO: Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
-2022-09-27 11:51:20 STATE: Clean: {"locations":["dist/*","types/lib/*","typedoc/*"]}
-2022-09-27 11:51:20 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":159,"outputBytes":608}
-2022-09-27 11:51:20 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":75,"inputBytes":658536,"outputBytes":309564}
-2022-09-27 11:51:20 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":167,"outputBytes":612}
-2022-09-27 11:51:20 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":75,"inputBytes":658540,"outputBytes":309568}
-2022-09-27 11:51:20 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":206,"outputBytes":664}
-2022-09-27 11:51:20 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":75,"inputBytes":658592,"outputBytes":309618}
-2022-09-27 11:51:20 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1125,"outputBytes":358}
-2022-09-27 11:51:20 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1088,"outputBytes":583}
-2022-09-27 11:51:20 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":75,"inputBytes":658511,"outputBytes":308423}
-2022-09-27 11:51:20 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":11,"inputBytes":1344,"outputBytes":2821914}
-2022-09-27 11:51:20 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":75,"inputBytes":3479842,"outputBytes":1688553}
-2022-09-27 11:51:21 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":75,"inputBytes":3479842,"outputBytes":3110385}
-2022-09-27 11:51:25 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":15}
-2022-09-27 11:51:27 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":77,"generated":true}
-2022-09-27 11:51:27 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6714,"outputBytes":3134}
-2022-09-27 11:51:27 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17155,"outputBytes":9175}
-2022-09-27 11:51:38 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":110,"errors":0,"warnings":0}
-2022-09-27 11:51:38 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
-2022-09-27 11:51:38 STATE: Copy: {"input":"tfjs/tfjs.esm.d.ts"}
-2022-09-27 11:51:38 INFO: Done...
-2022-09-27 11:51:39 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":193}
-2022-09-27 11:51:39 STATE: Copy: {"input":"types/human.d.ts"}
-2022-09-27 11:51:39 INFO: Analyze models: {"folders":8,"result":"models/models.json"}
-2022-09-27 11:51:39 STATE: Models {"folder":"./models","models":13}
-2022-09-27 11:51:39 STATE: Models {"folder":"../human-models/models","models":42}
-2022-09-27 11:51:39 STATE: Models {"folder":"../blazepose/model/","models":4}
-2022-09-27 11:51:39 STATE: Models {"folder":"../anti-spoofing/model","models":1}
-2022-09-27 11:51:39 STATE: Models {"folder":"../efficientpose/models","models":3}
-2022-09-27 11:51:39 STATE: Models {"folder":"../insightface/models","models":5}
-2022-09-27 11:51:39 STATE: Models {"folder":"../movenet/models","models":3}
-2022-09-27 11:51:39 STATE: Models {"folder":"../nanodet/models","models":4}
-2022-09-27 11:51:40 STATE: Models: {"count":57,"totalSize":383017442}
-2022-09-27 11:51:40 INFO: Human Build complete... {"logFile":"test/build.log"}
+2022-09-29 21:25:14 DATA: Build {"name":"@vladmandic/human","version":"2.11.0"}
+2022-09-29 21:25:14 INFO: Application: {"name":"@vladmandic/human","version":"2.11.0"}
+2022-09-29 21:25:14 INFO: Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
+2022-09-29 21:25:14 INFO: Toolchain: {"build":"0.7.14","esbuild":"0.15.10","typescript":"4.8.4","typedoc":"0.23.15","eslint":"8.24.0"}
+2022-09-29 21:25:14 INFO: Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
+2022-09-29 21:25:14 STATE: Clean: {"locations":["dist/*","types/lib/*","typedoc/*"]}
+2022-09-29 21:25:14 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":159,"outputBytes":608}
+2022-09-29 21:25:14 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":76,"inputBytes":665091,"outputBytes":312560}
+2022-09-29 21:25:14 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":167,"outputBytes":612}
+2022-09-29 21:25:14 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":76,"inputBytes":665095,"outputBytes":312564}
+2022-09-29 21:25:14 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":206,"outputBytes":664}
+2022-09-29 21:25:14 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":76,"inputBytes":665147,"outputBytes":312614}
+2022-09-29 21:25:14 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1125,"outputBytes":358}
+2022-09-29 21:25:14 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1088,"outputBytes":583}
+2022-09-29 21:25:14 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":76,"inputBytes":665066,"outputBytes":311419}
+2022-09-29 21:25:14 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":11,"inputBytes":1344,"outputBytes":2821914}
+2022-09-29 21:25:14 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":76,"inputBytes":3486397,"outputBytes":1691568}
+2022-09-29 21:25:14 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":76,"inputBytes":3486397,"outputBytes":3115662}
+2022-09-29 21:25:19 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":15}
+2022-09-29 21:25:21 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":79,"generated":true}
+2022-09-29 21:25:21 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5850,"outputBytes":2632}
+2022-09-29 21:25:21 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17155,"outputBytes":9175}
+2022-09-29 21:25:31 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":111,"errors":0,"warnings":0}
+2022-09-29 21:25:31 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
+2022-09-29 21:25:31 STATE: Copy: {"input":"tfjs/tfjs.esm.d.ts"}
+2022-09-29 21:25:31 INFO: Done...
+2022-09-29 21:25:32 DATA: API {"level":"warning","category":"Extractor","id":"ae-forgotten-export","file":"/home/vlado/dev/human/types/lib/src/human.d.ts","line":170,"text":"The symbol \"webcam\" needs to be exported by the entry point human.d.ts"}
+2022-09-29 21:25:32 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":194}
+2022-09-29 21:25:32 STATE: Copy: {"input":"types/human.d.ts"}
+2022-09-29 21:25:32 INFO: Analyze models: {"folders":8,"result":"models/models.json"}
+2022-09-29 21:25:32 STATE: Models {"folder":"./models","models":13}
+2022-09-29 21:25:32 STATE: Models {"folder":"../human-models/models","models":42}
+2022-09-29 21:25:32 STATE: Models {"folder":"../blazepose/model/","models":4}
+2022-09-29 21:25:32 STATE: Models {"folder":"../anti-spoofing/model","models":1}
+2022-09-29 21:25:32 STATE: Models {"folder":"../efficientpose/models","models":3}
+2022-09-29 21:25:32 STATE: Models {"folder":"../insightface/models","models":5}
+2022-09-29 21:25:32 STATE: Models {"folder":"../movenet/models","models":3}
+2022-09-29 21:25:32 STATE: Models {"folder":"../nanodet/models","models":4}
+2022-09-29 21:25:32 STATE: Models: {"count":57,"totalSize":383017442}
+2022-09-29 21:25:32 INFO: Human Build complete... {"logFile":"test/build.log"}
test/test.log (2003 lines changed): file diff suppressed because it is too large