mirror of https://github.com/vladmandic/human
updated demo
parent: de01483bc5
commit: 937a97f0d6
@@ -1,25 +1,12 @@
 <head>
-  <script src="https://cdn.jsdelivr.net/npm/three@0.106.2/build/three.min.js"></script>
-  <script src="https://cdn.jsdelivr.net/npm/scatter-gl@0.0.1/lib/scatter-gl.min.js"></script>
-  <script src="https://cdnjs.cloudflare.com/ajax/libs/tensorflow/2.6.0/tf.es2017.min.js"></script>
-  <style>
-    .canvas-wrapper { display: inline-block; vertical-align: top; }
-    #scatter-gl-container { display: inline-block; vertical-align: top; border: solid 1px black; position: relative; }
-    #scatter-gl-container canvas { transform: translate3d(-50%, -50%, 0); left: 50%; top: 50%; position: absolute; }
-  </style>
+  <script src="https://cdn.jsdelivr.net/npm/quicksettings@latest/quicksettings.min.js"></script>
+  <script src="../dist/human.js"></script>
+  <script src="./index.js"></script>
 </head>
-<body>
+<body style="margin: 0; background: black">
   <div id="main">
-    <div class="container">
-      <div class="canvas-wrapper">
-        <canvas id="output"></canvas>
-        <video id="video" playsinline style="visibility: hidden; width: auto; height: auto">
-        </video>
-      </div>
-      <div id="scatter-gl-container"></div>
-      <div id="faces"></div>
-    </div>
+    <video id="video" playsinline style="display: none"></video>
+    <canvas id="canvas"></canvas>
   </div>
 </body>
-<script src="https://cdnjs.cloudflare.com/ajax/libs/dat-gui/0.7.6/dat.gui.min.js"></script>
-<script type="module" src="./index.js"></script>
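Read against the scripts it replaces, the new page drops the three.js / scatter-gl / tfjs / dat.gui CDN dependencies in favour of the QuickSettings widget plus the prebuilt human bundle, keeps the <video> element hidden (display: none), and shows a single <canvas>. Each camera frame is copied onto that canvas before detection results are overlaid, so the raw video never needs to be visible. A minimal sketch of that pattern (element ids taken from the new markup; detection overlays omitted):

const video = document.getElementById('video');   // hidden camera feed
const canvas = document.getElementById('canvas'); // visible output surface
const ctx = canvas.getContext('2d');

function drawFrame() {
  // copy the current frame of the hidden video onto the visible canvas, then repeat
  ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
  requestAnimationFrame(drawFrame);
}
video.addEventListener('playing', () => requestAnimationFrame(drawFrame));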
demo/index.js | 189
@@ -1,120 +1,127 @@
-/* global tf, ScatterGL, dat */
+/* eslint-disable no-return-assign */
+/* global tf, human, QuickSettings */

-import human from '../dist/human.esm.js';

-const state = {
-  backend: 'webgl',
-  triangulateMesh: true,
-  renderPointcloud: true,
-  stop: false,
-  videoSize: 700,
-};
-const options = {
-};

-let ctx;
-let videoWidth;
-let videoHeight;
+let paused = false;
 let video;
 let canvas;
-let scatterGLHasInitialized = false;
-let scatterGL;
+let ctx;

-async function renderPrediction() {
-  const predictions = await human.detect(video);
-  ctx.drawImage(video, 0, 0, videoWidth, videoHeight, 0, 0, canvas.width, canvas.height);
-  const div = document.getElementById('faces');
-  div.innerHTML = '';
-  for (const prediction of predictions) {
-    div.appendChild(prediction.canvas);
+const config = {
+  face: {
+    enabled: true,
+    detector: { maxFaces: 10, skipFrames: 5, minConfidence: 0.8, iouThreshold: 0.3, scoreThreshold: 0.75 },
+    mesh: { enabled: true },
+    iris: { enabled: true },
+    age: { enabled: false, skipFrames: 5 },
+    gender: { enabled: false },
+  },
+  body: { enabled: false, maxDetections: 5, scoreThreshold: 0.75, nmsRadius: 20 },
+  hand: { enabled: false, skipFrames: 5, minConfidence: 0.8, iouThreshold: 0.3, scoreThreshold: 0.75 },
+};

+async function drawFace(faces) {
+  for (const face of faces) {
+    ctx.drawImage(video, 0, 0, video.width, video.height, 0, 0, canvas.width, canvas.height);
     ctx.beginPath();
-    ctx.rect(prediction.box[0], prediction.box[1], prediction.box[2], prediction.box[3]);
-    ctx.font = 'small-caps 1rem "Segoe UI"';
-    ctx.fillText(`${prediction.gender} ${prediction.age}`, prediction.box[0] + 2, prediction.box[1] + 16, prediction.box[2]);
+    ctx.rect(face.box[0], face.box[1], face.box[2], face.box[3]);
+    ctx.fillText(`face ${face.gender || ''} ${face.age || ''} ${face.iris ? 'iris: ' + face.iris : ''}`, face.box[0] + 2, face.box[1] + 16, face.box[2]);
     ctx.stroke();
-    if (state.triangulateMesh) {
-      for (let i = 0; i < human.triangulation.length / 3; i++) {
-        const points = [human.triangulation[i * 3], human.triangulation[i * 3 + 1], human.triangulation[i * 3 + 2]].map((index) => prediction.mesh[index]);
-        const region = new Path2D();
-        region.moveTo(points[0][0], points[0][1]);
-        for (let j = 1; i < points.length; j++) region.lineTo(points[j][0], points[j][1]);
-        region.closePath();
-        ctx.stroke(region);
-      }
-    } else {
-      for (let i = 0; i < prediction.mesh.length; i++) {
-        const x = prediction.mesh[i][0];
-        const y = prediction.mesh[i][1];
+    if (face.mesh) {
+      for (const point of face.mesh) {
+        ctx.fillStyle = `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)`;
         ctx.beginPath();
-        ctx.arc(x, y, 1 /* radius */, 0, 2 * Math.PI);
+        ctx.arc(point[0], point[1], 1 /* radius */, 0, 2 * Math.PI);
         ctx.fill();
       }
     }
-    if (state.renderPointcloud && scatterGL != null) {
-      const pointsData = predictions.map((pred) => pred.mesh.map((point) => ([-point[0], -point[1], -point[2]])));
-      let flattenedPointsData = [];
-      for (let i = 0; i < pointsData.length; i++) {
-        flattenedPointsData = flattenedPointsData.concat(pointsData[i]);
-      }
-      const dataset = new ScatterGL.Dataset(flattenedPointsData);
-      if (!scatterGLHasInitialized) scatterGL.render(dataset);
-      else scatterGL.updateDataset(dataset);
-      scatterGLHasInitialized = true;
-    }
   }
-  if (!state.stop) requestAnimationFrame(renderPrediction);
 }

-function setupDatGui() {
-  const gui = new dat.GUI();
-  gui.add(state, 'stop').onChange(() => { renderPrediction(); });
-  gui.add(state, 'backend', ['webgl', 'cpu']).onChange((backend) => { tf.setBackend(backend); });
-  gui.add(options, 'maxFaces', 1, 100, 1).onChange(() => { human.load(options); });
-  gui.add(options, 'detectionConfidence', 0, 1, 0.05).onChange(() => { human.load(options); });
-  gui.add(options, 'iouThreshold', 0, 1, 0.05).onChange(() => { human.load(options); });
-  gui.add(options, 'scoreThreshold', 0, 1, 0.05).onChange(() => { human.load(options); });
-  gui.add(state, 'triangulateMesh');
-  gui.add(state, 'renderPointcloud').onChange((render) => { document.querySelector('#scatter-gl-container').style.display = render ? 'inline-block' : 'none'; });
+async function drawBody(people) {
+  //
 }

+async function drawHand(hands) {
+  //
+}

+async function runHumanDetect() {
+  const result = await human.detect(video, config);
+  drawFace(result.face);
+  drawBody(result.body);
+  drawHand(result.hand);
+  if (!paused) requestAnimationFrame(runHumanDetect);
+}

+function setupGUI() {
+  const settings = QuickSettings.create(10, 10, 'Settings', document.getElementById('main'));
+  settings.addBoolean('Pause', paused, (val) => { paused = val; runHumanDetect(); });
+  settings.addBoolean('Face Detect', config.face.enabled, (val) => config.face.enabled = val);
+  settings.addBoolean('Face Mesh', config.face.mesh.enabled, (val) => config.face.mesh.enabled = val);
+  settings.addBoolean('Face Iris', config.face.iris.enabled, (val) => config.face.iris.enabled = val);
+  settings.addBoolean('Face Age', config.face.age.enabled, (val) => config.face.age.enabled = val);
+  settings.addBoolean('Face Gender', config.face.gender.enabled, (val) => config.face.gender.enabled = val);
+  settings.addBoolean('Body Pose', config.body.enabled, (val) => config.body.enabled = val);
+  settings.addBoolean('Hand Pose', config.hand.enabled, (val) => config.hand.enabled = val);
+  settings.addRange('Max Objects', 1, 20, 5, 1, (val) => {
+    config.face.detector.maxFaces = parseInt(val);
+    config.body.maxDetections = parseInt(val);
+  });
+  settings.addRange('Skip Frames', 1, 20, config.face.detector.skipFrames, 1, (val) => {
+    config.face.detector.skipFrames = parseInt(val);
+    config.face.age.skipFrames = parseInt(val);
+    config.hand.skipFrames = parseInt(val);
+  });
+  settings.addRange('Min Confidence', 0.1, 1.0, config.face.detector.minConfidence, 0.05, (val) => {
+    config.face.detector.minConfidence = parseFloat(val);
+    config.hand.minConfidence = parseFloat(val);
+  });
+  settings.addRange('Score Threshold', 0.1, 1.0, config.face.detector.scoreThreshold, 0.05, (val) => {
+    config.face.detector.scoreThreshold = parseFloat(val);
+    config.hand.scoreThreshold = parseFloat(val);
+    config.body.scoreThreshold = parseFloat(val);
+  });
+  settings.addRange('IOU Threshold', 0.1, 1.0, config.face.detector.iouThreshold, 0.05, (val) => {
+    config.face.detector.iouThreshold = parseFloat(val);
+    config.hand.iouThreshold = parseFloat(val);
+  });
+}

+async function setupCanvas() {
+  canvas = document.getElementById('canvas');
+  canvas.width = video.width;
+  canvas.height = video.height;
+  ctx = canvas.getContext('2d');
+  ctx.fillStyle = 'lightblue';
+  ctx.strokeStyle = 'lightblue';
+  ctx.lineWidth = 1;
+  ctx.font = 'small-caps 1rem "Segoe UI"';
+}

 async function setupCamera() {
   video = document.getElementById('video');
   const stream = await navigator.mediaDevices.getUserMedia({
     audio: false,
-    video: { facingMode: 'user', width: state.videoSize, height: state.videoSize },
+    video: { facingMode: 'user', width: window.innerWidth, height: window.innerHeight },
   });
   video.srcObject = stream;
   return new Promise((resolve) => {
-    video.onloadedmetadata = () => resolve(video);
+    video.onloadedmetadata = () => {
+      resolve(video);
+      video.width = video.videoWidth;
+      video.height = video.videoHeight;
+      video.play();
+    };
   });
 }

 async function main() {
-  await tf.setBackend(state.backend);
-  setupDatGui();
+  await tf.setBackend('webgl');
+  await tf.ready();
+  await setupGUI();
   await setupCamera();
-  video.play();
-  videoWidth = video.videoWidth;
-  videoHeight = video.videoHeight;
-  video.width = videoWidth;
-  video.height = videoHeight;
-  canvas = document.getElementById('output');
-  canvas.width = videoWidth;
-  canvas.height = videoHeight;
-  const canvasContainer = document.querySelector('.canvas-wrapper');
-  canvasContainer.style = `width: ${videoWidth}px; height: ${videoHeight}px`;
-  ctx = canvas.getContext('2d');
-  // ctx.translate(canvas.width, 0);
-  // ctx.scale(-1, 1);
-  ctx.fillStyle = '#32EEDB';
-  ctx.strokeStyle = '#32EEDB';
-  ctx.lineWidth = 0.5;
-  human.load(options);
-  renderPrediction();
-  if (state.renderPointcloud) {
-    document.querySelector('#scatter-gl-container').style = `width: ${state.videoSize}px; height: ${state.videoSize}px;`;
-    scatterGL = new ScatterGL(document.querySelector('#scatter-gl-container'), { rotateOnStart: false, selectEnabled: false });
-  }
+  await setupCanvas();
+  runHumanDetect();
 }

-main();
+window.onload = main;
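Stripped of the GUI plumbing, the rewritten demo reduces to: open the camera, call human.detect(video, config) on every frame, draw the returned face boxes onto the canvas, and re-schedule with requestAnimationFrame. A condensed sketch assembled from the added lines above (config abbreviated, GUI and error handling omitted; assumes the human global declared in the eslint globals comment):

let video;
let canvas;
let ctx;
let paused = false;

// abbreviated from the full config object in the diff above
const config = { face: { enabled: true, mesh: { enabled: true }, iris: { enabled: true } } };

async function setupCamera() {
  video = document.getElementById('video');
  const stream = await navigator.mediaDevices.getUserMedia({ audio: false, video: { facingMode: 'user' } });
  video.srcObject = stream;
  return new Promise((resolve) => {
    video.onloadedmetadata = () => { video.play(); resolve(video); };
  });
}

async function runHumanDetect() {
  const result = await human.detect(video, config); // results grouped as face / body / hand
  ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
  for (const face of result.face) ctx.strokeRect(face.box[0], face.box[1], face.box[2], face.box[3]);
  if (!paused) requestAnimationFrame(runHumanDetect); // keep looping unless paused from the GUI
}

async function main() {
  await setupCamera();
  canvas = document.getElementById('canvas');
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;
  ctx = canvas.getContext('2d');
  runHumanDetect();
}

window.onload = main;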
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long (3 files)
@@ -3,8 +3,10 @@ const blazeface = require('../blazeface');
 const keypoints = require('./keypoints');
 const pipe = require('./pipeline');
+const uv_coords = require('./uvcoords');
 const triangulation = require('./triangulation').default;

+exports.uv_coords = uv_coords;
 exports.triangulation = triangulation;

 async function loadDetectorModel(config) {
   return blazeface.load(config);
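Alongside triangulation, the facemesh module now also exports uv_coords. The triangulation export is a flat list of vertex indices, three per triangle, into the mesh points returned for each detected face; the removed demo code above consumed it as follows (a trimmed sketch, assuming ctx is a 2D canvas context, face.mesh is the per-face array of [x, y, z] points, and human.triangulation is the exported index list):

// draw the face mesh as a wireframe of triangles using the exported triangulation indices
function drawTriangles(ctx, face) {
  for (let i = 0; i < human.triangulation.length / 3; i++) {
    const points = [
      human.triangulation[i * 3 + 0],
      human.triangulation[i * 3 + 1],
      human.triangulation[i * 3 + 2],
    ].map((index) => face.mesh[index]);
    const region = new Path2D();
    region.moveTo(points[0][0], points[0][1]);
    for (let j = 1; j < points.length; j++) region.lineTo(points[j][0], points[j][1]);
    region.closePath();
    ctx.stroke(region);
  }
}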
@@ -16,7 +16,7 @@ const models = {
 function mergeDeep(...objects) {
   const isObject = (obj) => obj && typeof obj === 'object';
   return objects.reduce((prev, obj) => {
-    Object.keys(obj).forEach((key) => {
+    Object.keys(obj || {}).forEach((key) => {
       const pVal = prev[key];
       const oVal = obj[key];
       if (Array.isArray(pVal) && Array.isArray(oVal)) {
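The only functional change in this hunk is Object.keys(obj || {}): mergeDeep no longer throws when one of its arguments is undefined, which matters when detect(input, userConfig) is called without a user config to merge over the defaults. A standalone sketch of the behaviour (the array and object merge branches are cut off in the hunk and filled in here by assumption):

function mergeDeep(...objects) {
  const isObject = (obj) => obj && typeof obj === 'object';
  return objects.reduce((prev, obj) => {
    Object.keys(obj || {}).forEach((key) => { // tolerate undefined or null arguments
      const pVal = prev[key];
      const oVal = obj[key];
      if (Array.isArray(pVal) && Array.isArray(oVal)) prev[key] = pVal.concat(...oVal);
      else if (isObject(pVal) && isObject(oVal)) prev[key] = mergeDeep(pVal, oVal);
      else prev[key] = oVal;
    });
    return prev;
  }, {});
}

// merging defaults with a missing user config now simply returns the defaults instead of throwing
const merged = mergeDeep({ face: { enabled: true } }, undefined);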
@@ -79,3 +79,7 @@ async function detect(input, userConfig) {
 exports.detect = detect;
 exports.defaults = defaults;
 exports.models = models;
+exports.facemesh = facemesh;
+exports.ssrnet = ssrnet;
+exports.posenet = posenet;
+exports.handpose = handpose;
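With these additions the top-level module re-exports its building blocks, so a consumer can reach the individual pipelines (facemesh, ssrnet, posenet, handpose) as well as defaults and models directly, not only detect(). A hedged usage sketch (import path taken from the demo above; assumes the esm bundle surfaces the same exports on its default export):

import human from '../dist/human.esm.js';

async function inspect(videoElement) {
  const result = await human.detect(videoElement); // high-level API, unchanged
  console.log(result.face, result.body, result.hand);

  // building blocks re-exported by this change, useful for debugging or custom pipelines
  console.log(human.defaults);  // default configuration tree
  console.log(human.models);    // model registry declared in this file
  console.log(typeof human.facemesh, typeof human.ssrnet, typeof human.posenet, typeof human.handpose);
}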