mirror of https://github.com/vladmandic/human
updated iife and esm demos
parent
937a97f0d6
commit
134fdeeae1
90
README.md
90
README.md
|
@ -4,6 +4,8 @@ URL: <https://github.com/vladmandic/human>
|
|||
|
||||
*Suggestions are welcome!*
|
||||
|
||||
<hr>
|
||||
|
||||
## Credits
|
||||
|
||||
This is an amalgamation of multiple existing models:
|
||||
|
@ -15,21 +17,80 @@ This is an amalgamation of multiple existing models:
|
|||
- Body Pose Detection: [**PoseNet**](https://medium.com/tensorflow/real-time-human-pose-estimation-in-the-browser-with-tensorflow-js-7dd0bc881cd5)
|
||||
- Age & Gender Prediction: [**SSR-Net**](https://github.com/shamangary/SSR-Net)
|
||||
|
||||
## Install
|
||||
<hr>
|
||||
|
||||
```shell
|
||||
npm install @vladmandic/human
|
||||
## Installation
|
||||
|
||||
There are several ways to use Human:
|
||||
|
||||
**Important**
|
||||
*This version of `Human` includes `TensorFlow/JS (TFJS) 2.6.0` library which can be accessed via `human.tf`*
|
||||
*You should not manually load another instance of `tfjs`, but if you do, be aware of possible version conflicts*
|
||||
|
||||
### 1. IIFE script
|
||||
|
||||
This is the simplest way to use `Human` within a browser
|
||||
Simply download `dist/human.js`, include it in your `HTML` file & it's ready to use.
|
||||
|
||||
```html
|
||||
<script src="dist/human.js"></script>
|
||||
```
|
||||
|
||||
All pre-trained models are included in folder `/models` (25MB total)
|
||||
IIFE script auto-registers global namespace `human` within Window object.
|
||||
|
||||
### 2. ESM module
|
||||
|
||||
#### 2.1 With Bundler
|
||||
|
||||
If you're using bundler *(such as rollup, webpack, esbuild)* to package your client application, you can import ESM version of `Human` which supports full tree shaking
|
||||
|
||||
```js
|
||||
import human from 'dist/human.esm.js';
|
||||
```
|
||||
|
||||
#### 2.2 Using Script Module
|
||||
You could use same syntax within your main `JS` file if it's imported with `<script type="module">`
|
||||
|
||||
```html
|
||||
<script src="./index.js" type="module">
|
||||
```
|
||||
and then in your `index.js`
|
||||
|
||||
```js
|
||||
import human from 'dist/human.esm.js';
|
||||
```
|
||||
|
||||
### 3. NPM module
|
||||
|
||||
Similar to the ESM module, but with full sources as it points to `build/src/index.js` instead
|
||||
Recommended for `NodeJS` projects
|
||||
|
||||
Install with:
|
||||
```shell
|
||||
npm install @tensorflow/tfjs @vladmandic/human
|
||||
```
|
||||
And then use with:
|
||||
```js
|
||||
import * as tf from '@tensorflow/tfjs';
|
||||
import human from '@vladmandic/human';
|
||||
```
|
||||
|
||||
### Weights
|
||||
|
||||
Pretrained model weights are included in `./models`.
|
||||
|
||||
<hr>
|
||||
|
||||
## Demo
|
||||
|
||||
Demo is included in `/demo`
|
||||
Demos are included in `/demo`:
|
||||
|
||||
## Requirements
|
||||
- `demo-esm`: Demo using ESM module
|
||||
- `demo-iife`: Demo using IIFE module
|
||||
|
||||
`Human` library is based on [TensorFlow/JS (TFJS)](js.tensorflow.org), but does not package it to allow for independent version management - import `tfjs` before importing `Human`
|
||||
Both demos are identical, they just illustrate different ways to load `Human` library
|
||||
|
||||
<hr>
|
||||
|
||||
## Usage
|
||||
|
||||
|
@ -47,13 +108,16 @@ import human from '@vladmandic/human';
|
|||
const results = await human.detect(image, options?)
|
||||
```
|
||||
|
||||
Additionally, `Human` library exposes two classes:
|
||||
Additionally, `Human` library exposes several classes:
|
||||
|
||||
```js
|
||||
human.defaults // default configuration object
|
||||
human.models // dynamically maintained object of any loaded models
|
||||
human.tf // instance of tfjs used by human
|
||||
```
|
||||
|
||||
<hr>
|
||||
|
||||
## Configuration
|
||||
|
||||
Below is output of `human.defaults` object
|
||||
|
@ -124,6 +188,8 @@ Where:
|
|||
- `scoreThreshold`: threshold for deciding when to remove boxes based on score in non-maximum suppression
|
||||
- `nmsRadius`: radius for deciding points are too close in non-maximum suppression
|
||||
|
||||
<hr>
|
||||
|
||||
## Outputs
|
||||
|
||||
Result of `human.detect()` is a single object that includes data for all enabled modules and all detected objects:
|
||||
|
@ -159,15 +225,19 @@ result = {
|
|||
}
|
||||
```
|
||||
|
||||
<hr>
|
||||
|
||||
## Performance
|
||||
|
||||
Of course, performance will vary depending on your hardware, but also on number of enabled modules as well as their parameters.
|
||||
For example, on a low-end nVidia GTX1050 it can perform face detection at 50+ FPS, but drop to <5 FPS if all modules are enabled.
|
||||
|
||||
<hr>
|
||||
|
||||
## Todo
|
||||
|
||||
- Improve detection of smaller faces, add BlazeFace back model
|
||||
- Create demo, host it on gitpages
|
||||
- Implement draw helper functions
|
||||
- Memory leak in facemesh detector
|
||||
- Host demo it on gitpages
|
||||
- Sample Images
|
||||
- Rename human to human
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
<head>
|
||||
<script src="https://cdn.jsdelivr.net/npm/quicksettings@latest/quicksettings.min.js"></script>
|
||||
<script src="./demo-esm.js" type="module"></script>
|
||||
</head>
|
||||
<body style="margin: 0; background: black; color: white; font-family: 'Segoe UI'">
|
||||
<div id="main">
|
||||
<video id="video" playsinline style="display: none"></video>
|
||||
<canvas id="canvas"></canvas>
|
||||
<div id="log"></div>
|
||||
</div>
|
||||
</body>
|
|
@ -0,0 +1,269 @@
|
|||
/* global QuickSettings */
|
||||
/* eslint-disable no-return-assign */
|
||||
|
||||
import human from '../dist/human.esm.js';
|
||||
|
||||
// Detection configuration passed to human.detect(); every module starts
// disabled and is toggled at runtime through the QuickSettings panel.
const config = {
  face: {
    enabled: false,
    detector: { maxFaces: 10, skipFrames: 5, minConfidence: 0.8, iouThreshold: 0.3, scoreThreshold: 0.75 },
    mesh: { enabled: false },
    iris: { enabled: false },
    age: { enabled: false, skipFrames: 5 },
    gender: { enabled: false },
  },
  body: { enabled: false, maxDetections: 5, scoreThreshold: 0.75, nmsRadius: 20 },
  hand: { enabled: false, skipFrames: 5, minConfidence: 0.8, iouThreshold: 0.3, scoreThreshold: 0.75 },
};
// QuickSettings panel instance; created in setupCanvas(), read by the draw helpers.
let settings;
|
||||
|
||||
// Render face results: bounding box with a text label, plus optional mesh
// points and triangulated mesh polygons (controlled by the GUI toggles).
async function drawFace(result) {
  const ctx = document.getElementById('canvas').getContext('2d');
  ctx.fillStyle = 'lightcoral';
  ctx.strokeStyle = 'lightcoral';
  ctx.font = 'small-caps 1rem "Segoe UI"';
  for (const face of result) {
    // bounding box and label
    ctx.beginPath();
    ctx.rect(face.box[0], face.box[1], face.box[2], face.box[3]);
    ctx.fillText(`face ${face.gender || ''} ${face.age || ''} ${face.iris ? 'iris: ' + face.iris : ''}`, face.box[0] + 2, face.box[1] + 16, face.box[2]);
    ctx.stroke();
    if (!face.mesh) continue;
    if (settings.getValue('Draw Points')) {
      for (const pt of face.mesh) {
        // depth (z) shifts the color from blue-ish to red-ish
        ctx.fillStyle = `rgba(${127.5 + (2 * pt[2])}, ${127.5 - (2 * pt[2])}, 255, 0.5)`;
        ctx.beginPath();
        ctx.arc(pt[0], pt[1], 2, 0, 2 * Math.PI);
        ctx.fill();
      }
    }
    if (settings.getValue('Draw Polygons')) {
      const triangles = human.facemesh.triangulation;
      // triangulation is a flat list of vertex indices, three per triangle
      for (let i = 0; i < triangles.length; i += 3) {
        const verts = [triangles[i], triangles[i + 1], triangles[i + 2]].map((idx) => face.mesh[idx]);
        const path = new Path2D();
        path.moveTo(verts[0][0], verts[0][1]);
        for (const v of verts) path.lineTo(v[0], v[1]);
        path.closePath();
        const shade = `rgba(${127.5 + (2 * verts[0][2])}, ${127.5 - (2 * verts[0][2])}, 255, 0.5)`;
        ctx.fillStyle = shade;
        ctx.strokeStyle = shade;
        ctx.stroke(path);
        if (settings.getValue('Fill Polygons')) ctx.fill(path);
      }
    }
  }
}
|
||||
|
||||
// Render body pose results: optional keypoint dots and a stick-figure skeleton.
// Rewritten from ~25 copy-pasted find/moveTo/lineTo statements into a
// table-driven form; also guards against a keypoint missing from the model
// output (the original would crash dereferencing `.position` of undefined).
async function drawBody(result) {
  const ctx = document.getElementById('canvas').getContext('2d');
  ctx.fillStyle = 'lightcoral';
  ctx.strokeStyle = 'lightcoral';
  ctx.font = 'small-caps 1rem "Segoe UI"';
  // Each entry is one connected poly-line of the skeleton.
  const segments = [
    ['leftShoulder', 'rightShoulder', 'rightHip', 'leftHip', 'leftShoulder'], // torso
    ['leftHip', 'leftKnee', 'leftAnkle'], // left leg
    ['rightHip', 'rightKnee', 'rightAnkle'], // right leg
    ['leftShoulder', 'leftElbow', 'leftWrist'], // left arm
    ['rightShoulder', 'rightElbow', 'rightWrist'], // right arm
  ];
  for (const pose of result) {
    if (settings.getValue('Draw Points')) {
      for (const point of pose.keypoints) {
        ctx.beginPath();
        ctx.arc(point.position.x, point.position.y, 2, 0, 2 * Math.PI);
        ctx.fill();
      }
    }
    if (settings.getValue('Draw Polygons')) {
      const path = new Path2D();
      for (const names of segments) {
        let first = true;
        for (const name of names) {
          const part = pose.keypoints.find((a) => a.part === name);
          if (!part) continue; // skip keypoints the model did not report
          if (first) path.moveTo(part.position.x, part.position.y);
          else path.lineTo(part.position.x, part.position.y);
          first = false;
        }
      }
      ctx.stroke(path);
    }
  }
}
|
||||
|
||||
// Render hand pose results: optional landmark dots and one poly-line per finger.
async function drawHand(result) {
  const ctx = document.getElementById('canvas').getContext('2d');
  ctx.font = 'small-caps 1rem "Segoe UI"';
  window.result = result; // debug aid: expose the last result on the global scope
  for (const hand of result) {
    if (settings.getValue('Draw Points')) {
      for (const point of hand.landmarks) {
        // depth (z) shifts the color from blue-ish to red-ish
        ctx.fillStyle = `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)`;
        ctx.beginPath();
        ctx.arc(point[0], point[1], 2, 0, 2 * Math.PI);
        ctx.fill();
      }
    }
    if (settings.getValue('Draw Polygons')) {
      // BUGFIX: original used `for (const i in part)` which yields STRING
      // keys, so the strict comparison `i === 0` was never true and the
      // moveTo branch was dead code (it only rendered correctly because
      // lineTo on an empty subpath happens to behave like moveTo).
      const addPart = (part) => {
        ctx.beginPath();
        part.forEach((point, i) => {
          ctx.strokeStyle = `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)`;
          if (i === 0) ctx.moveTo(point[0], point[1]);
          else ctx.lineTo(point[0], point[1]);
        });
        ctx.stroke();
      };
      addPart(hand.annotations.indexFinger);
      addPart(hand.annotations.middleFinger);
      addPart(hand.annotations.ringFinger);
      addPart(hand.annotations.pinky);
      addPart(hand.annotations.thumb);
      addPart(hand.annotations.palmBase);
    }
  }
}
|
||||
|
||||
// Main detection loop: grab the current camera frame, run human.detect(),
// draw all results plus engine statistics, then schedule the next frame.
// The loop stops itself when the video track is not live or playback paused.
async function runHumanDetect() {
  const video = document.getElementById('video');
  const canvas = document.getElementById('canvas');
  const log = document.getElementById('log');
  const track = video.srcObject ? video.srcObject.getVideoTracks()[0] : null;
  const live = !!track && (track.readyState === 'live') && (video.readyState > 2) && (!video.paused);
  if (!live) return;
  // timed detection pass
  const t0 = performance.now();
  const result = await human.detect(video, config);
  const t1 = performance.now();
  settings.setValue('FPS', Math.round(1000 / (t1 - t0)));
  // mirror the current video frame onto the canvas
  const ctx = canvas.getContext('2d');
  ctx.drawImage(video, 0, 0, video.width, video.height, 0, 0, canvas.width, canvas.height);
  // overlay all detection results
  drawFace(result.face);
  drawBody(result.body);
  drawHand(result.hand);
  // report tfjs engine / memory statistics
  const engine = await human.tf.engine();
  log.innerText = `
TFJS Version: ${human.tf.version_core} Memory: ${engine.state.numBytes.toLocaleString()} bytes ${engine.state.numDataBuffers.toLocaleString()} buffers ${engine.state.numTensors.toLocaleString()} tensors
GPU Memory: used ${engine.backendInstance.numBytesInGPU.toLocaleString()} bytes free ${Math.floor(1024 * 1024 * engine.backendInstance.numMBBeforeWarning).toLocaleString()} bytes
Result: Face: ${(JSON.stringify(result.face)).length.toLocaleString()} bytes Body: ${(JSON.stringify(result.body)).length.toLocaleString()} bytes Hand: ${(JSON.stringify(result.hand)).length.toLocaleString()} bytes
`;
  // rinse & repeat
  requestAnimationFrame(runHumanDetect);
}
|
||||
|
||||
// Build the QuickSettings control panel and bind each control to the
// matching field(s) of the shared `config` object.
// Fix: parseInt() is now always called with an explicit radix of 10
// (the original relied on the implicit default).
function setupGUI() {
  // read-only FPS indicator, updated by runHumanDetect()
  settings.addRange('FPS', 0, 100, 0, 1);
  settings.addBoolean('Pause', false, (val) => {
    if (val) document.getElementById('video').pause();
    else document.getElementById('video').play();
    runHumanDetect();
  });
  settings.addHTML('line1', '<hr>'); settings.hideTitle('line1');
  settings.addBoolean('Draw Points', true);
  settings.addBoolean('Draw Polygons', true);
  settings.addBoolean('Fill Polygons', true);
  settings.addHTML('line2', '<hr>'); settings.hideTitle('line2');
  settings.addBoolean('Face Detect', config.face.enabled, (val) => config.face.enabled = val);
  settings.addBoolean('Face Mesh', config.face.mesh.enabled, (val) => config.face.mesh.enabled = val);
  settings.addBoolean('Face Iris', config.face.iris.enabled, (val) => config.face.iris.enabled = val);
  settings.addBoolean('Face Age', config.face.age.enabled, (val) => config.face.age.enabled = val);
  settings.addBoolean('Face Gender', config.face.gender.enabled, (val) => config.face.gender.enabled = val);
  settings.addBoolean('Body Pose', config.body.enabled, (val) => config.body.enabled = val);
  settings.addBoolean('Hand Pose', config.hand.enabled, (val) => config.hand.enabled = val);
  settings.addHTML('line3', '<hr>'); settings.hideTitle('line3');
  // shared tuning sliders: one slider drives the equivalent option of every module
  settings.addRange('Max Objects', 1, 20, 5, 1, (val) => {
    config.face.detector.maxFaces = parseInt(val, 10);
    config.body.maxDetections = parseInt(val, 10);
  });
  settings.addRange('Skip Frames', 1, 20, config.face.detector.skipFrames, 1, (val) => {
    config.face.detector.skipFrames = parseInt(val, 10);
    config.face.age.skipFrames = parseInt(val, 10);
    config.hand.skipFrames = parseInt(val, 10);
  });
  settings.addRange('Min Confidence', 0.1, 1.0, config.face.detector.minConfidence, 0.05, (val) => {
    config.face.detector.minConfidence = parseFloat(val);
    config.hand.minConfidence = parseFloat(val);
  });
  settings.addRange('Score Threshold', 0.1, 1.0, config.face.detector.scoreThreshold, 0.05, (val) => {
    config.face.detector.scoreThreshold = parseFloat(val);
    config.hand.scoreThreshold = parseFloat(val);
    config.body.scoreThreshold = parseFloat(val);
  });
  settings.addRange('IOU Threshold', 0.1, 1.0, config.face.detector.iouThreshold, 0.05, (val) => {
    config.face.detector.iouThreshold = parseFloat(val);
    config.hand.iouThreshold = parseFloat(val);
  });
}
|
||||
|
||||
// Size the output canvas to match the camera feed and create the shared
// QuickSettings panel (stored in the module-level `settings`).
async function setupCanvas() {
  const video = document.getElementById('video');
  const canvas = document.getElementById('canvas');
  // match canvas resolution to the video element sized in setupCamera()
  canvas.width = video.width;
  canvas.height = video.height;
  settings = QuickSettings.create(10, 10, 'Settings', document.getElementById('main'));
}
|
||||
|
||||
// Open the user-facing camera, attach the stream to the video element, and
// resolve once metadata is available (so videoWidth/videoHeight are known).
async function setupCamera() {
  const video = document.getElementById('video');
  const constraints = {
    audio: false,
    video: { facingMode: 'user', width: window.innerWidth, height: window.innerHeight },
  };
  video.srcObject = await navigator.mediaDevices.getUserMedia(constraints);
  return new Promise((resolve) => {
    video.onloadedmetadata = () => {
      resolve(video);
      // propagate the intrinsic stream size to the element attributes
      video.width = video.videoWidth;
      video.height = video.videoHeight;
      video.play();
    };
  });
}
|
||||
|
||||
// Entry point: select the webgl tfjs backend, initialize camera, canvas and
// GUI, then start the detection loop.
async function main() {
  await human.tf.setBackend('webgl');
  await human.tf.ready();
  await setupCamera();
  await setupCanvas();
  await setupGUI();
  runHumanDetect();
}
|
||||
|
||||
window.onload = main;
|
|
@ -1,12 +1,12 @@
|
|||
<head>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/tensorflow/2.6.0/tf.es2017.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/quicksettings@latest/quicksettings.min.js"></script>
|
||||
<script src="../dist/human.js"></script>
|
||||
<script src="./index.js"></script>
|
||||
<script src="./demo-iife.js"></script>
|
||||
</head>
|
||||
<body style="margin: 0; background: black">
|
||||
<body style="margin: 0; background: black; color: white; font-family: 'Segoe UI'">
|
||||
<div id="main">
|
||||
<video id="video" playsinline style="display: none"></video>
|
||||
<canvas id="canvas"></canvas>
|
||||
<div id="log"></div>
|
||||
</div>
|
||||
</body>
|
|
@ -0,0 +1,267 @@
|
|||
/* eslint-disable no-return-assign */
|
||||
/* global human, QuickSettings */
|
||||
|
||||
// Detection configuration passed to human.detect(); every module starts
// disabled and is toggled at runtime through the QuickSettings panel.
const config = {
  face: {
    enabled: false,
    detector: { maxFaces: 10, skipFrames: 5, minConfidence: 0.8, iouThreshold: 0.3, scoreThreshold: 0.75 },
    mesh: { enabled: false },
    iris: { enabled: false },
    age: { enabled: false, skipFrames: 5 },
    gender: { enabled: false },
  },
  body: { enabled: false, maxDetections: 5, scoreThreshold: 0.75, nmsRadius: 20 },
  hand: { enabled: false, skipFrames: 5, minConfidence: 0.8, iouThreshold: 0.3, scoreThreshold: 0.75 },
};
// QuickSettings panel instance; created in setupCanvas(), read by the draw helpers.
let settings;
|
||||
|
||||
// Render face results: bounding box with a text label, plus optional mesh
// points and triangulated mesh polygons (controlled by the GUI toggles).
async function drawFace(result) {
  const ctx = document.getElementById('canvas').getContext('2d');
  ctx.fillStyle = 'lightcoral';
  ctx.strokeStyle = 'lightcoral';
  ctx.font = 'small-caps 1rem "Segoe UI"';
  for (const face of result) {
    // bounding box and label
    ctx.beginPath();
    ctx.rect(face.box[0], face.box[1], face.box[2], face.box[3]);
    ctx.fillText(`face ${face.gender || ''} ${face.age || ''} ${face.iris ? 'iris: ' + face.iris : ''}`, face.box[0] + 2, face.box[1] + 16, face.box[2]);
    ctx.stroke();
    if (!face.mesh) continue;
    if (settings.getValue('Draw Points')) {
      for (const pt of face.mesh) {
        // depth (z) shifts the color from blue-ish to red-ish
        ctx.fillStyle = `rgba(${127.5 + (2 * pt[2])}, ${127.5 - (2 * pt[2])}, 255, 0.5)`;
        ctx.beginPath();
        ctx.arc(pt[0], pt[1], 2, 0, 2 * Math.PI);
        ctx.fill();
      }
    }
    if (settings.getValue('Draw Polygons')) {
      const triangles = human.facemesh.triangulation;
      // triangulation is a flat list of vertex indices, three per triangle
      for (let i = 0; i < triangles.length; i += 3) {
        const verts = [triangles[i], triangles[i + 1], triangles[i + 2]].map((idx) => face.mesh[idx]);
        const path = new Path2D();
        path.moveTo(verts[0][0], verts[0][1]);
        for (const v of verts) path.lineTo(v[0], v[1]);
        path.closePath();
        const shade = `rgba(${127.5 + (2 * verts[0][2])}, ${127.5 - (2 * verts[0][2])}, 255, 0.5)`;
        ctx.fillStyle = shade;
        ctx.strokeStyle = shade;
        ctx.stroke(path);
        if (settings.getValue('Fill Polygons')) ctx.fill(path);
      }
    }
  }
}
|
||||
|
||||
// Render body pose results: optional keypoint dots and a stick-figure skeleton.
// Rewritten from ~25 copy-pasted find/moveTo/lineTo statements into a
// table-driven form; also guards against a keypoint missing from the model
// output (the original would crash dereferencing `.position` of undefined).
async function drawBody(result) {
  const ctx = document.getElementById('canvas').getContext('2d');
  ctx.fillStyle = 'lightcoral';
  ctx.strokeStyle = 'lightcoral';
  ctx.font = 'small-caps 1rem "Segoe UI"';
  // Each entry is one connected poly-line of the skeleton.
  const segments = [
    ['leftShoulder', 'rightShoulder', 'rightHip', 'leftHip', 'leftShoulder'], // torso
    ['leftHip', 'leftKnee', 'leftAnkle'], // left leg
    ['rightHip', 'rightKnee', 'rightAnkle'], // right leg
    ['leftShoulder', 'leftElbow', 'leftWrist'], // left arm
    ['rightShoulder', 'rightElbow', 'rightWrist'], // right arm
  ];
  for (const pose of result) {
    if (settings.getValue('Draw Points')) {
      for (const point of pose.keypoints) {
        ctx.beginPath();
        ctx.arc(point.position.x, point.position.y, 2, 0, 2 * Math.PI);
        ctx.fill();
      }
    }
    if (settings.getValue('Draw Polygons')) {
      const path = new Path2D();
      for (const names of segments) {
        let first = true;
        for (const name of names) {
          const part = pose.keypoints.find((a) => a.part === name);
          if (!part) continue; // skip keypoints the model did not report
          if (first) path.moveTo(part.position.x, part.position.y);
          else path.lineTo(part.position.x, part.position.y);
          first = false;
        }
      }
      ctx.stroke(path);
    }
  }
}
|
||||
|
||||
// Render hand pose results: optional landmark dots and one poly-line per finger.
async function drawHand(result) {
  const ctx = document.getElementById('canvas').getContext('2d');
  ctx.font = 'small-caps 1rem "Segoe UI"';
  window.result = result; // debug aid: expose the last result on the global scope
  for (const hand of result) {
    if (settings.getValue('Draw Points')) {
      for (const point of hand.landmarks) {
        // depth (z) shifts the color from blue-ish to red-ish
        ctx.fillStyle = `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)`;
        ctx.beginPath();
        ctx.arc(point[0], point[1], 2, 0, 2 * Math.PI);
        ctx.fill();
      }
    }
    if (settings.getValue('Draw Polygons')) {
      // BUGFIX: original used `for (const i in part)` which yields STRING
      // keys, so the strict comparison `i === 0` was never true and the
      // moveTo branch was dead code (it only rendered correctly because
      // lineTo on an empty subpath happens to behave like moveTo).
      const addPart = (part) => {
        ctx.beginPath();
        part.forEach((point, i) => {
          ctx.strokeStyle = `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)`;
          if (i === 0) ctx.moveTo(point[0], point[1]);
          else ctx.lineTo(point[0], point[1]);
        });
        ctx.stroke();
      };
      addPart(hand.annotations.indexFinger);
      addPart(hand.annotations.middleFinger);
      addPart(hand.annotations.ringFinger);
      addPart(hand.annotations.pinky);
      addPart(hand.annotations.thumb);
      addPart(hand.annotations.palmBase);
    }
  }
}
|
||||
|
||||
// Main detection loop: grab the current camera frame, run human.detect(),
// draw all results plus engine statistics, then schedule the next frame.
// The loop stops itself when the video track is not live or playback paused.
async function runHumanDetect() {
  const video = document.getElementById('video');
  const canvas = document.getElementById('canvas');
  const log = document.getElementById('log');
  const track = video.srcObject ? video.srcObject.getVideoTracks()[0] : null;
  const live = !!track && (track.readyState === 'live') && (video.readyState > 2) && (!video.paused);
  if (!live) return;
  // timed detection pass
  const t0 = performance.now();
  const result = await human.detect(video, config);
  const t1 = performance.now();
  settings.setValue('FPS', Math.round(1000 / (t1 - t0)));
  // mirror the current video frame onto the canvas
  const ctx = canvas.getContext('2d');
  ctx.drawImage(video, 0, 0, video.width, video.height, 0, 0, canvas.width, canvas.height);
  // overlay all detection results
  drawFace(result.face);
  drawBody(result.body);
  drawHand(result.hand);
  // report tfjs engine / memory statistics
  const engine = await human.tf.engine();
  log.innerText = `
TFJS Version: ${human.tf.version_core} Memory: ${engine.state.numBytes.toLocaleString()} bytes ${engine.state.numDataBuffers.toLocaleString()} buffers ${engine.state.numTensors.toLocaleString()} tensors
GPU Memory: used ${engine.backendInstance.numBytesInGPU.toLocaleString()} bytes free ${Math.floor(1024 * 1024 * engine.backendInstance.numMBBeforeWarning).toLocaleString()} bytes
Result: Face: ${(JSON.stringify(result.face)).length.toLocaleString()} bytes Body: ${(JSON.stringify(result.body)).length.toLocaleString()} bytes Hand: ${(JSON.stringify(result.hand)).length.toLocaleString()} bytes
`;
  // rinse & repeat
  requestAnimationFrame(runHumanDetect);
}
|
||||
|
||||
// Build the QuickSettings control panel and bind each control to the
// matching field(s) of the shared `config` object.
// Fix: parseInt() is now always called with an explicit radix of 10
// (the original relied on the implicit default).
function setupGUI() {
  // read-only FPS indicator, updated by runHumanDetect()
  settings.addRange('FPS', 0, 100, 0, 1);
  settings.addBoolean('Pause', false, (val) => {
    if (val) document.getElementById('video').pause();
    else document.getElementById('video').play();
    runHumanDetect();
  });
  settings.addHTML('line1', '<hr>'); settings.hideTitle('line1');
  settings.addBoolean('Draw Points', true);
  settings.addBoolean('Draw Polygons', true);
  settings.addBoolean('Fill Polygons', true);
  settings.addHTML('line2', '<hr>'); settings.hideTitle('line2');
  settings.addBoolean('Face Detect', config.face.enabled, (val) => config.face.enabled = val);
  settings.addBoolean('Face Mesh', config.face.mesh.enabled, (val) => config.face.mesh.enabled = val);
  settings.addBoolean('Face Iris', config.face.iris.enabled, (val) => config.face.iris.enabled = val);
  settings.addBoolean('Face Age', config.face.age.enabled, (val) => config.face.age.enabled = val);
  settings.addBoolean('Face Gender', config.face.gender.enabled, (val) => config.face.gender.enabled = val);
  settings.addBoolean('Body Pose', config.body.enabled, (val) => config.body.enabled = val);
  settings.addBoolean('Hand Pose', config.hand.enabled, (val) => config.hand.enabled = val);
  settings.addHTML('line3', '<hr>'); settings.hideTitle('line3');
  // shared tuning sliders: one slider drives the equivalent option of every module
  settings.addRange('Max Objects', 1, 20, 5, 1, (val) => {
    config.face.detector.maxFaces = parseInt(val, 10);
    config.body.maxDetections = parseInt(val, 10);
  });
  settings.addRange('Skip Frames', 1, 20, config.face.detector.skipFrames, 1, (val) => {
    config.face.detector.skipFrames = parseInt(val, 10);
    config.face.age.skipFrames = parseInt(val, 10);
    config.hand.skipFrames = parseInt(val, 10);
  });
  settings.addRange('Min Confidence', 0.1, 1.0, config.face.detector.minConfidence, 0.05, (val) => {
    config.face.detector.minConfidence = parseFloat(val);
    config.hand.minConfidence = parseFloat(val);
  });
  settings.addRange('Score Threshold', 0.1, 1.0, config.face.detector.scoreThreshold, 0.05, (val) => {
    config.face.detector.scoreThreshold = parseFloat(val);
    config.hand.scoreThreshold = parseFloat(val);
    config.body.scoreThreshold = parseFloat(val);
  });
  settings.addRange('IOU Threshold', 0.1, 1.0, config.face.detector.iouThreshold, 0.05, (val) => {
    config.face.detector.iouThreshold = parseFloat(val);
    config.hand.iouThreshold = parseFloat(val);
  });
}
|
||||
|
||||
// Size the output canvas to match the camera feed and create the shared
// QuickSettings panel (stored in the module-level `settings`).
async function setupCanvas() {
  const video = document.getElementById('video');
  const canvas = document.getElementById('canvas');
  // match canvas resolution to the video element sized in setupCamera()
  canvas.width = video.width;
  canvas.height = video.height;
  settings = QuickSettings.create(10, 10, 'Settings', document.getElementById('main'));
}
|
||||
|
||||
// Open the user-facing camera, attach the stream to the video element, and
// resolve once metadata is available (so videoWidth/videoHeight are known).
async function setupCamera() {
  const video = document.getElementById('video');
  const constraints = {
    audio: false,
    video: { facingMode: 'user', width: window.innerWidth, height: window.innerHeight },
  };
  video.srcObject = await navigator.mediaDevices.getUserMedia(constraints);
  return new Promise((resolve) => {
    video.onloadedmetadata = () => {
      resolve(video);
      // propagate the intrinsic stream size to the element attributes
      video.width = video.videoWidth;
      video.height = video.videoHeight;
      video.play();
    };
  });
}
|
||||
|
||||
// Entry point: select the webgl tfjs backend, initialize camera, canvas and
// GUI, then start the detection loop.
async function main() {
  await human.tf.setBackend('webgl');
  await human.tf.ready();
  await setupCamera();
  await setupCanvas();
  await setupGUI();
  runHumanDetect();
}
|
||||
|
||||
window.onload = main;
|
127
demo/index.js
127
demo/index.js
|
@ -1,127 +0,0 @@
|
|||
/* eslint-disable no-return-assign */
|
||||
/* global tf, human, QuickSettings */
|
||||
|
||||
let paused = false;
|
||||
let video;
|
||||
let canvas;
|
||||
let ctx;
|
||||
|
||||
const config = {
|
||||
face: {
|
||||
enabled: true,
|
||||
detector: { maxFaces: 10, skipFrames: 5, minConfidence: 0.8, iouThreshold: 0.3, scoreThreshold: 0.75 },
|
||||
mesh: { enabled: true },
|
||||
iris: { enabled: true },
|
||||
age: { enabled: false, skipFrames: 5 },
|
||||
gender: { enabled: false },
|
||||
},
|
||||
body: { enabled: false, maxDetections: 5, scoreThreshold: 0.75, nmsRadius: 20 },
|
||||
hand: { enabled: false, skipFrames: 5, minConfidence: 0.8, iouThreshold: 0.3, scoreThreshold: 0.75 },
|
||||
};
|
||||
|
||||
async function drawFace(faces) {
|
||||
for (const face of faces) {
|
||||
ctx.drawImage(video, 0, 0, video.width, video.height, 0, 0, canvas.width, canvas.height);
|
||||
ctx.beginPath();
|
||||
ctx.rect(face.box[0], face.box[1], face.box[2], face.box[3]);
|
||||
ctx.fillText(`face ${face.gender || ''} ${face.age || ''} ${face.iris ? 'iris: ' + face.iris : ''}`, face.box[0] + 2, face.box[1] + 16, face.box[2]);
|
||||
ctx.stroke();
|
||||
if (face.mesh) {
|
||||
for (const point of face.mesh) {
|
||||
ctx.fillStyle = `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)`;
|
||||
ctx.beginPath();
|
||||
ctx.arc(point[0], point[1], 1 /* radius */, 0, 2 * Math.PI);
|
||||
ctx.fill();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function drawBody(people) {
|
||||
//
|
||||
}
|
||||
|
||||
async function drawHand(hands) {
|
||||
//
|
||||
}
|
||||
|
||||
// Run one detection pass on the current video frame, render every result
// category, and schedule the next pass unless the demo is paused.
async function runHumanDetect() {
  const result = await human.detect(video, config);
  // Await each renderer so the frame is fully drawn before the next pass is
  // scheduled; the original left these async calls as floating promises.
  await drawFace(result.face);
  await drawBody(result.body);
  await drawHand(result.hand);
  if (!paused) requestAnimationFrame(runHumanDetect);
}
|
||||
|
||||
// Build the QuickSettings control panel and bind every control so that
// changes mutate the shared `config` object in place; the detection loop
// picks the new values up on its next pass.
function setupGUI() {
  const settings = QuickSettings.create(10, 10, 'Settings', document.getElementById('main'));
  settings.addBoolean('Pause', paused, (val) => { paused = val; runHumanDetect(); });
  settings.addBoolean('Face Detect', config.face.enabled, (val) => config.face.enabled = val);
  settings.addBoolean('Face Mesh', config.face.mesh.enabled, (val) => config.face.mesh.enabled = val);
  settings.addBoolean('Face Iris', config.face.iris.enabled, (val) => config.face.iris.enabled = val);
  settings.addBoolean('Face Age', config.face.age.enabled, (val) => config.face.age.enabled = val);
  settings.addBoolean('Face Gender', config.face.gender.enabled, (val) => config.face.gender.enabled = val);
  settings.addBoolean('Body Pose', config.body.enabled, (val) => config.body.enabled = val);
  settings.addBoolean('Hand Pose', config.hand.enabled, (val) => config.hand.enabled = val);
  // Range callbacks deliver string values; parse with an explicit radix 10
  // so the value is never misinterpreted (original omitted the radix).
  settings.addRange('Max Objects', 1, 20, 5, 1, (val) => {
    config.face.detector.maxFaces = parseInt(val, 10);
    config.body.maxDetections = parseInt(val, 10);
  });
  settings.addRange('Skip Frames', 1, 20, config.face.detector.skipFrames, 1, (val) => {
    config.face.detector.skipFrames = parseInt(val, 10);
    config.face.age.skipFrames = parseInt(val, 10);
    config.hand.skipFrames = parseInt(val, 10);
  });
  settings.addRange('Min Confidence', 0.1, 1.0, config.face.detector.minConfidence, 0.05, (val) => {
    config.face.detector.minConfidence = parseFloat(val);
    config.hand.minConfidence = parseFloat(val);
  });
  settings.addRange('Score Threshold', 0.1, 1.0, config.face.detector.scoreThreshold, 0.05, (val) => {
    config.face.detector.scoreThreshold = parseFloat(val);
    config.hand.scoreThreshold = parseFloat(val);
    config.body.scoreThreshold = parseFloat(val);
  });
  settings.addRange('IOU Threshold', 0.1, 1.0, config.face.detector.iouThreshold, 0.05, (val) => {
    config.face.detector.iouThreshold = parseFloat(val);
    config.hand.iouThreshold = parseFloat(val);
  });
}
|
||||
|
||||
// Size the overlay canvas to match the video element and prime the shared
// 2d context with the stroke/fill colors and font used by the draw helpers.
async function setupCanvas() {
  canvas = document.getElementById('canvas');
  const { width, height } = video;
  canvas.width = width;
  canvas.height = height;
  ctx = canvas.getContext('2d');
  ctx.strokeStyle = 'lightblue';
  ctx.fillStyle = 'lightblue';
  ctx.lineWidth = 1;
  ctx.font = 'small-caps 1rem "Segoe UI"';
}
|
||||
|
||||
// Acquire the user-facing camera, attach the stream to the <video> element,
// and resolve with that element once its metadata (intrinsic size) is known.
async function setupCamera() {
  video = document.getElementById('video');
  const stream = await navigator.mediaDevices.getUserMedia({
    audio: false,
    video: { facingMode: 'user', width: window.innerWidth, height: window.innerHeight },
  });
  video.srcObject = stream;
  return new Promise((resolve) => {
    video.onloadedmetadata = () => {
      // Size the element and start playback BEFORE resolving, so awaiting
      // callers (e.g. setupCanvas reading video.width) never observe a
      // zero-sized, non-playing video. The original resolved first.
      video.width = video.videoWidth;
      video.height = video.videoHeight;
      video.play();
      resolve(video);
    };
  });
}
|
||||
|
||||
// Demo entry point: initialize the global tfjs backend, then run each setup
// stage in sequence (they depend on one another) and start the detect loop.
async function main() {
  await tf.setBackend('webgl');
  await tf.ready();
  const stages = [setupGUI, setupCamera, setupCanvas];
  for (const stage of stages) await stage();
  runHumanDetect();
}
|
||||
|
||||
// Start the demo only after the page has loaded, so the video/canvas elements exist.
window.onload = main;
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -34,7 +34,7 @@
|
|||
},
|
||||
"scripts": {
|
||||
"build": "rimraf dist/ && npm run build-esm && npm run build-iife",
|
||||
"build-esm": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --external:@tensorflow --outfile=dist/human.esm.js src/index.js",
|
||||
"build-esm": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --outfile=dist/human.esm.js src/index.js",
|
||||
"build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --global-name=human --outfile=dist/human.js src/index.js"
|
||||
},
|
||||
"keywords": [
|
||||
|
|
|
@ -72,7 +72,9 @@ class MediaPipeFaceMesh {
|
|||
tf.dispose(prediction.confidence);
|
||||
tf.dispose(prediction.image);
|
||||
tf.dispose(prediction.coords);
|
||||
tf.dispose(prediction);
|
||||
}
|
||||
tf.dispose(predictions);
|
||||
return results;
|
||||
}
|
||||
}
|
||||
|
|
127
src/image.js
127
src/image.js
|
@ -1,127 +0,0 @@
|
|||
// Default font applied by every drawing helper unless the caller overrides it.
const defaultFont = 'small-caps 1rem "Segoe UI"';

// Erase the entire drawing surface of `canvas`; silently does nothing when
// the canvas reference is missing.
function clear(canvas) {
  if (!canvas) return;
  canvas.getContext('2d').clearRect(0, 0, canvas.width, canvas.height);
}
|
||||
|
||||
// Copy the (x, y, width, height) region of `image` onto a new OffscreenCanvas
// of that size, optionally stamping `title` in the top-left corner, and
// return the canvas. The options object now defaults to {}, so callers may
// omit it entirely — previously crop(image, x, y, w, h) threw a TypeError
// while destructuring undefined.
function crop(image, x, y, width, height, { color = 'white', title = null, font = null } = {}) {
  const canvas = new OffscreenCanvas(width, height);
  const ctx = canvas.getContext('2d');
  ctx.drawImage(image, x, y, width, height, 0, 0, canvas.width, canvas.height);
  ctx.fillStyle = color;
  ctx.font = font || defaultFont;
  if (title) ctx.fillText(title, 2, 16, canvas.width - 4);
  return canvas;
}
|
||||
|
||||
// Draw a filled dot of `radius` at (x, y) on `canvas`, optionally followed by
// a `title` label offset to the right. Silently does nothing without a canvas.
function point({ canvas = null, x = 0, y = 0, color = 'white', radius = 2, title = null, font = null }) {
  if (!canvas) return;
  const context = canvas.getContext('2d');
  context.fillStyle = color;
  context.beginPath();
  context.arc(x, y, radius, 0, 2 * Math.PI);
  context.fill();
  context.font = font || defaultFont;
  if (title) context.fillText(title, x + 10, y + 4);
}
|
||||
|
||||
// Stroke a rounded-rectangle outline on `canvas` and, when given, print a
// `title` label just inside its top-left corner. No-op without a canvas.
function rect({ canvas = null, x = 0, y = 0, width = 0, height = 0, radius = 8, lineWidth = 2, color = 'white', title = null, font = null }) {
  if (!canvas) return;
  const context = canvas.getContext('2d');
  const right = x + width;
  const bottom = y + height;
  context.lineWidth = lineWidth;
  // Trace the outline clockwise from the top edge, rounding each corner
  // with a quadratic curve of the requested radius.
  context.beginPath();
  context.moveTo(x + radius, y);
  context.lineTo(right - radius, y);
  context.quadraticCurveTo(right, y, right, y + radius);
  context.lineTo(right, bottom - radius);
  context.quadraticCurveTo(right, bottom, right - radius, bottom);
  context.lineTo(x + radius, bottom);
  context.quadraticCurveTo(x, bottom, x, bottom - radius);
  context.lineTo(x, y + radius);
  context.quadraticCurveTo(x, y, x + radius, y);
  context.closePath();
  context.strokeStyle = color;
  context.stroke();
  // Thin width back down for the label text.
  context.lineWidth = 1;
  context.fillStyle = color;
  context.font = font || defaultFont;
  if (title) context.fillText(title, x + 4, y + 16);
}
|
||||
|
||||
// Draw a polyline through `points` on `canvas`, optionally labelling the
// first point with `title`. No-op without a canvas or with fewer than
// two points.
function line({ points = [], canvas = null, lineWidth = 2, color = 'white', title = null, font = null }) {
  if (!canvas) return;
  if (points.length < 2) return;
  const ctx = canvas.getContext('2d');
  ctx.lineWidth = lineWidth;
  ctx.beginPath();
  ctx.moveTo(points[0][0], points[0][1]);
  // Start at index 1: the original iterated the whole array and so issued a
  // redundant zero-length lineTo(points[0]) right after moveTo(points[0]).
  for (let i = 1; i < points.length; i += 1) ctx.lineTo(points[i][0], points[i][1]);
  ctx.strokeStyle = color;
  ctx.fillStyle = color;
  ctx.stroke();
  ctx.lineWidth = 1;
  ctx.font = font || defaultFont;
  if (title) ctx.fillText(title, points[0][0] + 4, points[0][1] + 16);
}
|
||||
|
||||
// Draw a smooth curve through `points` on `canvas`, deriving bezier control
// points from neighbor distances scaled by `tension` (Catmull-Rom-like;
// tension 0 yields straight segments). With exactly two points this
// degenerates to a straight line. Optionally labels the first point.
function spline({ points = [], canvas = null, tension = 0.5, lineWidth = 2, color = 'white', title = null, font = null }) {
  if (!canvas) return;
  if (points.length < 2) return; // need at least one segment
  // Vector from point i to point j within a flat [x0, y0, x1, y1, ...] array.
  const va = (arr, i, j) => [arr[2 * j] - arr[2 * i], arr[2 * j + 1] - arr[2 * i + 1]];
  // Euclidean distance between points i and j within the same flat layout.
  const distance = (arr, i, j) => Math.sqrt(((arr[2 * i] - arr[2 * j]) ** 2) + ((arr[2 * i + 1] - arr[2 * j + 1]) ** 2));
  // Given three consecutive points, compute the two control points flanking
  // the middle one; each offset is proportional to that segment's share of
  // the combined length, scaled by `tension`.
  // eslint-disable-next-line no-unused-vars
  function ctlpts(x1, y1, x2, y2, x3, y3) {
    // eslint-disable-next-line prefer-rest-params
    const v = va(arguments, 0, 2);
    // eslint-disable-next-line prefer-rest-params
    const d01 = distance(arguments, 0, 1);
    // eslint-disable-next-line prefer-rest-params
    const d12 = distance(arguments, 1, 2);
    const d012 = d01 + d12;
    return [
      x2 - v[0] * tension * d01 / d012, y2 - v[1] * tension * d01 / d012,
      x2 + v[0] * tension * d12 / d012, y2 + v[1] * tension * d12 / d012,
    ];
  }
  // Flatten [[x, y], ...] into [x0, y0, x1, y1, ...] as the helpers expect.
  const pts = [];
  for (const pt of points) {
    pts.push(pt[0]);
    pts.push(pt[1]);
  }
  // Control points for the interior points, two (x, y) pairs per call.
  // NOTE(review): the bound is `pts.length - 2` over the FLAT array, so for
  // n input points the loop runs 2n-2 times and the later iterations read
  // past the end of `pts` (producing NaN control points). Those extra
  // entries appear unused by the drawing below, but verify the intended
  // bound was `points.length - 2`.
  let cps = [];
  for (let i = 0; i < pts.length - 2; i += 1) {
    cps = cps.concat(ctlpts(pts[2 * i + 0], pts[2 * i + 1], pts[2 * i + 2], pts[2 * i + 3], pts[2 * i + 4], pts[2 * i + 5]));
  }
  const ctx = canvas.getContext('2d');
  ctx.lineWidth = lineWidth;
  ctx.strokeStyle = color;
  ctx.fillStyle = color;
  if (points.length === 2) {
    // Only two points: a plain straight segment.
    ctx.beginPath();
    ctx.moveTo(pts[0], pts[1]);
    ctx.lineTo(pts[2], pts[3]);
  } else {
    ctx.beginPath();
    ctx.moveTo(pts[0], pts[1]);
    // first segment is a quadratic
    ctx.quadraticCurveTo(cps[0], cps[1], pts[2], pts[3]);
    // for all middle points, connect with bezier
    let i;
    for (i = 2; i < ((pts.length / 2) - 1); i += 1) {
      ctx.bezierCurveTo(cps[(2 * (i - 1) - 1) * 2], cps[(2 * (i - 1) - 1) * 2 + 1], cps[(2 * (i - 1)) * 2], cps[(2 * (i - 1)) * 2 + 1], pts[i * 2], pts[i * 2 + 1]);
    }
    // last segment is a quadratic
    ctx.quadraticCurveTo(cps[(2 * (i - 1) - 1) * 2], cps[(2 * (i - 1) - 1) * 2 + 1], pts[i * 2], pts[i * 2 + 1]);
  }
  ctx.stroke();
  // Restore line width and draw the optional label at the first point.
  ctx.lineWidth = 1;
  ctx.font = font || defaultFont;
  if (title) ctx.fillText(title, points[0][0] + 4, points[0][1] + 16);
}
|
||||
|
||||
// Public API of this image-helper module: canvas drawing primitives used by
// the demos (crop to an OffscreenCanvas, rect/point/line/spline overlays,
// and full-canvas clear).
exports.crop = crop;
exports.rect = rect;
exports.point = point;
exports.line = line;
exports.spline = spline;
exports.clear = clear;
|
|
@ -1,9 +1,8 @@
|
|||
const tf = require('@tensorflow/tfjs');
|
||||
const facemesh = require('./facemesh/index.js');
|
||||
const ssrnet = require('./ssrnet/index.js');
|
||||
const posenet = require('./posenet/index.js');
|
||||
const handpose = require('./handpose/index.js');
|
||||
// const image = require('./image.js');
|
||||
// const triangulation = require('./triangulation.js').default;
|
||||
const defaults = require('./config.js').default;
|
||||
|
||||
const models = {
|
||||
|
@ -83,3 +82,4 @@ exports.facemesh = facemesh;
|
|||
// Re-export the individual model namespaces and the bundled tfjs instance
// so library consumers can reach them directly (e.g. `human.tf`).
exports.ssrnet = ssrnet;
exports.posenet = posenet;
exports.handpose = handpose;
exports.tf = tf;
|
||||
|
|
Loading…
Reference in New Issue