refactored package file layout

pull/50/head
Vladimir Mandic 2020-10-17 06:30:00 -04:00
parent c95d851506
commit effd8028dd
25 changed files with 7327 additions and 9801 deletions

View File

@ -41,8 +41,7 @@ There are multiple ways to use `Human` library, pick one that suits you:
- `dist/human.js`: IIFE format minified bundle with TFJS for Browsers
- `dist/human.esm.js`: ESM format minified bundle with TFJS for Browsers
- `dist/human.esm-nobundle.js`: ESM format non-minified bundle without TFJS for Browsers
- `dist/human.cjs`: CommonJS format minified bundle with TFJS for NodeJS
- `dist/human-nobundle.cjs`: CommonJS format non-minified bundle without TFJS for NodeJS
- `dist/human.cjs`: CommonJS format non-minified bundle without TFJS for NodeJS
All versions include `sourcemap`
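For orientation, a minimal sketch of consuming the browser ESM bundle listed above; it assumes `dist/human.esm.js` exposes a default export and that a partial config merges with library defaults:
```js
// minimal browser usage sketch; assumes dist/human.esm.js exposes a default export
import human from '../dist/human.esm.js';

// partial configuration; assumption: omitted values fall back to library defaults
const config = { backend: 'webgl', console: true };

async function run() {
  const input = document.getElementById('video');    // a video, image or canvas element
  const result = await human.detect(input, config);  // same call the demos below use
  console.log(result.face, result.body, result.hand);
}
run();
```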
@ -116,21 +115,13 @@ Entry point is bundle in CJS format `dist/human.node.js`
You also need to install and include `tfjs-node` or `tfjs-node-gpu` in your project so it can register an optimized backend
Install with:
```shell
npm install @vladmandic/human
```
And then use with:
```js
const human = require('@vladmandic/human'); // points to @vladmandic/human/dist/human.cjs
```
or
```shell
npm install @vladmandic/human @tensorflow/tfjs-node
```
And then use with:
```js
const tf = require('@tensorflow/tfjs-node'); // can also use '@tensorflow/tfjs-node-gpu' if you have environment with CUDA extensions
const human = require('@vladmandic/human/dist/human-nobundle.cjs');
const human = require('@vladmandic/human'); // points to @vladmandic/human/dist/human.cjs
```
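To make that concrete, a minimal end-to-end NodeJS sketch; the file name, the decoded-tensor input and the `backend: 'tensorflow'` value are illustrative assumptions, and `demo/node.js` remains the maintained example:
```js
// minimal NodeJS sketch; file name, tensor input and backend value are illustrative assumptions
const fs = require('fs');
const tf = require('@tensorflow/tfjs-node'); // registers the optimized native backend
const human = require('@vladmandic/human');  // resolves to dist/human.cjs as described above

async function run(file) {
  const buffer = fs.readFileSync(file);
  const tensor = tf.node.decodeImage(buffer); // decode jpg/png into a 3D image tensor
  const result = await human.detect(tensor, { backend: 'tensorflow' });
  tensor.dispose();
  console.log(result.performance);
}
run('sample.jpg');
```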
@ -164,11 +155,10 @@ If your application resides in a different folder, modify `modelPath` property i
Demos are included in `/demo`:
Browser:
- `demo-esm`: Full demo using Browser with ESM module, includes selectable backends and webworkers
- `demo-iife`: Older demo using Browser with IIFE module
- `index.html`, `browser.js`, `worker.js`: Full demo using Browser with ESM module, includes selectable backends and webworkers
NodeJS:
- `demo-node`: Demo using NodeJS with CJS module
- `node.js`: Demo using NodeJS with CJS module
This is a very simple demo: although the `Human` library is compatible with NodeJS execution
and is able to load images and models from the local filesystem,

View File

@ -11,6 +11,7 @@ const ui = {
columns: 3,
busy: false,
facing: 'user',
worker: 'worker.js',
};
const config = {
@ -243,7 +244,7 @@ function webWorker(input, image, canvas) {
if (!worker) {
// create new webworker and add event handler only once
log('Creating worker thread');
worker = new Worker('demo-esm-webworker.js', { type: 'module' });
worker = new Worker(ui.worker, { type: 'module' });
// after receiving message from webworker, parse&draw results and send new frame for processing
worker.addEventListener('message', (msg) => drawResults(input, msg.data, canvas));
}
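For context, the script referenced by `ui.worker` has to mirror this message protocol: rebuild the `ImageData` from the transferred buffer, run detection, and post the results back for `drawResults`. A minimal sketch of that counterpart, assuming the worker imports the ESM bundle via its default export and that results are plain, structured-cloneable objects:
```js
// worker.js counterpart sketch; assumes the ESM bundle default export and plain-object results
import human from '../dist/human.esm.js';

onmessage = async (msg) => {
  // rebuild the ImageData that the main thread transferred as a raw ArrayBuffer
  const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
  let result = {};
  try {
    result = await human.detect(image, msg.data.config);
  } catch (err) {
    result = { error: err.message };
  }
  postMessage(result); // drawResults() on the main thread consumes this message
};
```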

View File

@ -1,15 +0,0 @@
<html>
<head>
<script src="../assets/quicksettings.js"></script>
<!-- <script src="../assets/tf.min.js"></script> -->
<!-- <script src="../assets/tf-backend-wasm.min.js"></script> -->
<script src="./demo-esm.js" type="module"></script>
</head>
<body style="margin: 0; background: black; color: white; font-family: 'Segoe UI'; font-size: 16px; font-variant: small-caps; overflow-x: hidden">
<video id="video" playsinline style="display: none"></video>
<image id="image" src="" style="display: none"></video>
<canvas id="canvas"></canvas>
<div id="samples" style="display: flex; flex-wrap: wrap"></div>
<div id="log" style="position: fixed; bottom: 0">Human library</div>
</body>
</html>

View File

@ -1,422 +0,0 @@
<head>
<script src="https://cdn.jsdelivr.net/npm/quicksettings@latest/quicksettings.min.js"></script>
<script src="../dist/human.js"></script>
</head>
<body style="margin: 0; background: black; color: white; font-family: 'Segoe UI'">
<div id="main">
<video id="video" playsinline style="display: none"></video>
<image id="image" src="" style="display: none"></video>
<canvas id="canvas"></canvas>
<div id="log">Starting Human library</div>
</div>
<script>
/* global QuickSettings */
const ui = {
baseColor: 'rgba(255, 200, 255, 0.3)',
baseLabel: 'rgba(255, 200, 255, 0.8)',
baseFont: 'small-caps 1.2rem "Segoe UI"',
baseLineWidth: 16,
};
const config = {
backend: 'webgl',
console: true,
face: {
enabled: true,
detector: { maxFaces: 10, skipFrames: 10, minConfidence: 0.5, iouThreshold: 0.3, scoreThreshold: 0.7 },
mesh: { enabled: true },
iris: { enabled: true },
age: { enabled: true, skipFrames: 10 },
gender: { enabled: true },
emotion: { enabled: true, minConfidence: 0.5, useGrayscale: true },
},
body: { enabled: true, maxDetections: 10, scoreThreshold: 0.7, nmsRadius: 20 },
hand: { enabled: true, skipFrames: 10, minConfidence: 0.5, iouThreshold: 0.3, scoreThreshold: 0.7 },
};
let settings;
let worker;
let timeStamp;
const fps = [];
function str(...msg) {
if (!Array.isArray(msg)) return msg;
let line = '';
for (const entry of msg) {
if (typeof entry === 'object') line += JSON.stringify(entry).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ');
else line += entry;
}
return line;
}
const log = (...msg) => {
// eslint-disable-next-line no-console
if (config.console) console.log(...msg);
};
async function drawFace(result, canvas) {
if (!result) return;
const ctx = canvas.getContext('2d');
ctx.strokeStyle = ui.baseColor;
ctx.font = ui.baseFont;
for (const face of result) {
ctx.fillStyle = ui.baseColor;
ctx.lineWidth = ui.baseLineWidth;
ctx.beginPath();
if (settings.getValue('Draw Boxes')) {
ctx.rect(face.box[0], face.box[1], face.box[2], face.box[3]);
}
const labelAgeGender = `${face.gender || ''} ${face.age || ''}`;
const labelIris = face.iris ? `iris: ${face.iris}` : '';
const labelEmotion = face.emotion && face.emotion[0] ? `emotion: ${Math.trunc(100 * face.emotion[0].score)}% ${face.emotion[0].emotion}` : '';
ctx.fillStyle = ui.baseLabel;
ctx.fillText(`${Math.trunc(100 * face.confidence)}% face ${labelAgeGender} ${labelIris} ${labelEmotion}`, face.box[0] + 2, face.box[1] + 22);
ctx.stroke();
ctx.lineWidth = 1;
if (face.mesh) {
if (settings.getValue('Draw Points')) {
for (const point of face.mesh) {
ctx.fillStyle = `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)`;
ctx.beginPath();
ctx.arc(point[0], point[1], 2, 0, 2 * Math.PI);
ctx.fill();
}
}
if (settings.getValue('Draw Polygons')) {
for (let i = 0; i < human.facemesh.triangulation.length / 3; i++) {
const points = [
human.facemesh.triangulation[i * 3 + 0],
human.facemesh.triangulation[i * 3 + 1],
human.facemesh.triangulation[i * 3 + 2],
].map((index) => face.mesh[index]);
const path = new Path2D();
path.moveTo(points[0][0], points[0][1]);
for (const point of points) {
path.lineTo(point[0], point[1]);
}
path.closePath();
ctx.strokeStyle = `rgba(${127.5 + (2 * points[0][2])}, ${127.5 - (2 * points[0][2])}, 255, 0.3)`;
ctx.stroke(path);
if (settings.getValue('Fill Polygons')) {
ctx.fillStyle = `rgba(${127.5 + (2 * points[0][2])}, ${127.5 - (2 * points[0][2])}, 255, 0.3)`;
ctx.fill(path);
}
}
}
}
}
}
async function drawBody(result, canvas) {
if (!result) return;
const ctx = canvas.getContext('2d');
ctx.fillStyle = ui.baseColor;
ctx.strokeStyle = ui.baseColor;
ctx.font = ui.baseFont;
ctx.lineWidth = ui.baseLineWidth;
for (const pose of result) {
if (settings.getValue('Draw Points')) {
for (const point of pose.keypoints) {
ctx.beginPath();
ctx.arc(point.position.x, point.position.y, 2, 0, 2 * Math.PI);
ctx.fill();
}
}
if (settings.getValue('Draw Polygons')) {
const path = new Path2D();
let part;
// torso
part = pose.keypoints.find((a) => a.part === 'leftShoulder');
path.moveTo(part.position.x, part.position.y);
part = pose.keypoints.find((a) => a.part === 'rightShoulder');
path.lineTo(part.position.x, part.position.y);
part = pose.keypoints.find((a) => a.part === 'rightHip');
path.lineTo(part.position.x, part.position.y);
part = pose.keypoints.find((a) => a.part === 'leftHip');
path.lineTo(part.position.x, part.position.y);
part = pose.keypoints.find((a) => a.part === 'leftShoulder');
path.lineTo(part.position.x, part.position.y);
// legs
part = pose.keypoints.find((a) => a.part === 'leftHip');
path.moveTo(part.position.x, part.position.y);
part = pose.keypoints.find((a) => a.part === 'leftKnee');
path.lineTo(part.position.x, part.position.y);
part = pose.keypoints.find((a) => a.part === 'leftAnkle');
path.lineTo(part.position.x, part.position.y);
part = pose.keypoints.find((a) => a.part === 'rightHip');
path.moveTo(part.position.x, part.position.y);
part = pose.keypoints.find((a) => a.part === 'rightKnee');
path.lineTo(part.position.x, part.position.y);
part = pose.keypoints.find((a) => a.part === 'rightAnkle');
path.lineTo(part.position.x, part.position.y);
// arms
part = pose.keypoints.find((a) => a.part === 'leftShoulder');
path.moveTo(part.position.x, part.position.y);
part = pose.keypoints.find((a) => a.part === 'leftElbow');
path.lineTo(part.position.x, part.position.y);
part = pose.keypoints.find((a) => a.part === 'leftWrist');
path.lineTo(part.position.x, part.position.y);
// arms
part = pose.keypoints.find((a) => a.part === 'rightShoulder');
path.moveTo(part.position.x, part.position.y);
part = pose.keypoints.find((a) => a.part === 'rightElbow');
path.lineTo(part.position.x, part.position.y);
part = pose.keypoints.find((a) => a.part === 'rightWrist');
path.lineTo(part.position.x, part.position.y);
// draw all
ctx.stroke(path);
}
}
}
async function drawHand(result, canvas) {
if (!result) return;
const ctx = canvas.getContext('2d');
ctx.font = ui.baseFont;
ctx.lineWidth = ui.baseLineWidth;
window.result = result;
for (const hand of result) {
if (settings.getValue('Draw Boxes')) {
ctx.lineWidth = ui.baseLineWidth;
ctx.beginPath();
ctx.fillStyle = ui.baseColor;
ctx.rect(hand.box[0], hand.box[1], hand.box[2], hand.box[3]);
ctx.fillStyle = ui.baseLabel;
ctx.fillText('hand', hand.box[0] + 2, hand.box[1] + 22, hand.box[2]);
ctx.stroke();
}
if (settings.getValue('Draw Points')) {
for (const point of hand.landmarks) {
ctx.fillStyle = `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)`;
ctx.beginPath();
ctx.arc(point[0], point[1], 2, 0, 2 * Math.PI);
ctx.fill();
}
}
if (settings.getValue('Draw Polygons')) {
const addPart = (part) => {
for (let i = 1; i < part.length; i++) {
ctx.lineWidth = ui.baseLineWidth;
ctx.beginPath();
ctx.strokeStyle = `rgba(${127.5 + (2 * part[i][2])}, ${127.5 - (2 * part[i][2])}, 255, 0.5)`;
ctx.moveTo(part[i - 1][0], part[i - 1][1]);
ctx.lineTo(part[i][0], part[i][1]);
ctx.stroke();
}
};
addPart(hand.annotations.indexFinger);
addPart(hand.annotations.middleFinger);
addPart(hand.annotations.ringFinger);
addPart(hand.annotations.pinky);
addPart(hand.annotations.thumb);
addPart(hand.annotations.palmBase);
}
}
}
async function drawResults(input, result, canvas) {
// update fps
settings.setValue('FPS', Math.round(1000 / (performance.now() - timeStamp)));
fps.push(1000 / (performance.now() - timeStamp));
if (fps.length > 20) fps.shift();
settings.setValue('FPS', Math.round(10 * fps.reduce((a, b) => a + b) / fps.length) / 10);
// eslint-disable-next-line no-use-before-define
requestAnimationFrame(() => runHumanDetect(input, canvas)); // immediate loop
// draw image from video
const ctx = canvas.getContext('2d');
ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
// draw all results
drawFace(result.face, canvas);
drawBody(result.body, canvas);
drawHand(result.hand, canvas);
// update log
const engine = await human.tf.engine();
const memory = `${engine.state.numBytes.toLocaleString()} bytes ${engine.state.numDataBuffers.toLocaleString()} buffers ${engine.state.numTensors.toLocaleString()} tensors`;
const gpu = engine.backendInstance ? `GPU: ${engine.backendInstance.numBytesInGPU.toLocaleString()} bytes` : '';
document.getElementById('log').innerText = `
TFJS Version: ${human.tf.version_core} | Backend: ${human.tf.getBackend()} | Memory: ${memory} ${gpu}
Performance: ${str(result.performance)} | Object size: ${(str(result)).length.toLocaleString()} bytes
`;
}
// simple wrapper for worker.postmessage that creates worker if one does not exist
function webWorker(input, image, canvas) {
if (!worker) {
// create new webworker and add event handler only once
log('Creating worker thread');
worker = new Worker('demo-esm-webworker.js', { type: 'module' });
// after receiving message from webworker, parse&draw results and send new frame for processing
worker.addEventListener('message', (msg) => drawResults(input, msg.data, canvas));
}
// pass image data as arraybuffer to worker by reference to avoid copy
worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);
}
async function runHumanDetect(input, canvas) {
const live = input.srcObject ? ((input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused)) : false;
timeStamp = performance.now();
// perform detect if live video or not video at all
if (live || !(input instanceof HTMLVideoElement)) {
if (settings.getValue('Use Web Worker')) {
// get image data from video as we cannot send html objects to webworker
const offscreen = new OffscreenCanvas(canvas.width, canvas.height);
const ctx = offscreen.getContext('2d');
ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
const data = ctx.getImageData(0, 0, canvas.width, canvas.height);
// perform detection in worker
webWorker(input, data, canvas);
} else {
let result = {};
try {
// perform detection
result = await human.detect(input, config);
} catch (err) {
log('Error during execution:', err.message);
}
drawResults(input, result, canvas);
}
}
}
function setupUI() {
// add all variables to ui control panel
settings = QuickSettings.create(10, 10, 'Settings', document.getElementById('main'));
const style = document.createElement('style');
// style.type = 'text/css';
style.innerHTML = `
.qs_main { font: 1rem "Segoe UI"; }
.qs_label { font: 0.8rem "Segoe UI"; }
.qs_title_bar { display: none; }
.qs_content { background: darkslategray; }
.qs_container { background: transparent; color: white; margin: 6px; padding: 6px; }
.qs_checkbox_label { top: 2px; }
.qs_button { width: -webkit-fill-available; font: 1rem "Segoe UI"; cursor: pointer; }
`;
document.getElementsByTagName('head')[0].appendChild(style);
settings.addButton('Play/Pause', () => {
const video = document.getElementById('video');
const canvas = document.getElementById('canvas');
if (!video.paused) {
document.getElementById('log').innerText = 'Paused ...';
video.pause();
} else {
document.getElementById('log').innerText = 'Starting Human Library ...';
video.play();
}
runHumanDetect(video, canvas);
});
settings.addDropDown('Backend', ['webgl', 'wasm', 'cpu'], async (val) => config.backend = val.value);
settings.addHTML('title', 'Enabled Models'); settings.hideTitle('title');
settings.addBoolean('Face Detect', config.face.enabled, (val) => config.face.enabled = val);
settings.addBoolean('Face Mesh', config.face.mesh.enabled, (val) => config.face.mesh.enabled = val);
settings.addBoolean('Face Iris', config.face.iris.enabled, (val) => config.face.iris.enabled = val);
settings.addBoolean('Face Age', config.face.age.enabled, (val) => config.face.age.enabled = val);
settings.addBoolean('Face Gender', config.face.gender.enabled, (val) => config.face.gender.enabled = val);
settings.addBoolean('Face Emotion', config.face.emotion.enabled, (val) => config.face.emotion.enabled = val);
settings.addBoolean('Body Pose', config.body.enabled, (val) => config.body.enabled = val);
settings.addBoolean('Hand Pose', config.hand.enabled, (val) => config.hand.enabled = val);
settings.addHTML('title', 'Model Parameters'); settings.hideTitle('title');
settings.addRange('Max Objects', 1, 20, 5, 1, (val) => {
config.face.detector.maxFaces = parseInt(val);
config.body.maxDetections = parseInt(val);
});
settings.addRange('Skip Frames', 1, 20, config.face.detector.skipFrames, 1, (val) => {
config.face.detector.skipFrames = parseInt(val);
config.face.emotion.skipFrames = parseInt(val);
config.face.age.skipFrames = parseInt(val);
config.hand.skipFrames = parseInt(val);
});
settings.addRange('Min Confidence', 0.1, 1.0, config.face.detector.minConfidence, 0.05, (val) => {
config.face.detector.minConfidence = parseFloat(val);
config.face.emotion.minConfidence = parseFloat(val);
config.hand.minConfidence = parseFloat(val);
});
settings.addRange('Score Threshold', 0.1, 1.0, config.face.detector.scoreThreshold, 0.05, (val) => {
config.face.detector.scoreThreshold = parseFloat(val);
config.hand.scoreThreshold = parseFloat(val);
config.body.scoreThreshold = parseFloat(val);
});
settings.addRange('IOU Threshold', 0.1, 1.0, config.face.detector.iouThreshold, 0.05, (val) => {
config.face.detector.iouThreshold = parseFloat(val);
config.hand.iouThreshold = parseFloat(val);
});
settings.addHTML('title', 'UI Options'); settings.hideTitle('title');
settings.addBoolean('Use Web Worker', false);
settings.addBoolean('Draw Boxes', true);
settings.addBoolean('Draw Points', true);
settings.addBoolean('Draw Polygons', true);
settings.addBoolean('Fill Polygons', true);
settings.addHTML('line1', '<hr>'); settings.hideTitle('line1');
settings.addRange('FPS', 0, 100, 0, 1);
}
async function setupCanvas(input) {
// setup canvas object to same size as input as camera resolution may change
const canvas = document.getElementById('canvas');
canvas.width = input.width;
canvas.height = input.height;
return canvas;
}
// eslint-disable-next-line no-unused-vars
async function setupCamera() {
log('Setting up camera');
// setup webcam. note that navigator.mediaDevices requires that page is accessed via https
const video = document.getElementById('video');
if (!navigator.mediaDevices) {
document.getElementById('log').innerText = 'Video not supported';
return null;
}
const stream = await navigator.mediaDevices.getUserMedia({
audio: false,
video: { facingMode: 'user', width: window.innerWidth, height: window.innerHeight },
});
video.srcObject = stream;
return new Promise((resolve) => {
video.onloadedmetadata = () => {
video.width = video.videoWidth;
video.height = video.videoHeight;
video.play();
video.pause();
resolve(video);
};
});
}
// eslint-disable-next-line no-unused-vars
async function setupImage() {
const image = document.getElementById('image');
image.width = window.innerWidth;
image.height = window.innerHeight;
return new Promise((resolve) => {
image.onload = () => resolve(image);
image.src = 'sample.jpg';
});
}
async function main() {
log('Human starting ...');
// setup ui control panel
await setupUI();
// setup webcam
const input = await setupCamera();
// or setup image
// const input = await setupImage();
// setup output canvas from input object
await setupCanvas(input);
const msg = `Human ready: version: ${human.version} TensorFlow/JS version: ${human.tf.version_core}`;
document.getElementById('log').innerText = msg;
log(msg);
// run actual detection. if input is video, it will run in a loop else it will run only once
// runHumanDetect(video, canvas);
}
window.onload = main;
window.onresize = main;
</script>
</body>

demo/index.html (new file, 27 lines)

@ -0,0 +1,27 @@
<!DOCTYPE html>
<html lang="en">
<head>
<title>Human</title>
<meta http-equiv="content-type">
<meta content="text/html">
<meta charset="UTF-8">
<meta name="description" content="3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction & Emotion Prediction; Author: Vladimir Mandic <mandic00@live.com>">
<meta name="viewport" content="width=device-width, initial-scale=0.5, minimum-scale=0.1, maximum-scale=4.0, shrink-to-fit=yes, user-scalable=yes">
<meta name="theme-color" content="black"/>
<meta name="application-name" content="Human">
<meta name="msapplication-tooltip" content="Human: AI-powered 3D Human Detection">
<link rel="manifest" href="../dist/human.esm.json">
<link rel="shortcut icon" href="../favicon.ico" type="image/x-icon">
<!-- <script src="../assets/tf.min.js"></script> -->
<!-- <script src="../assets/tf-backend-wasm.min.js"></script> -->
<script src="../assets/quicksettings.js"></script>
<script src="./browser.js" type="module"></script>
</head>
<body style="margin: 0; background: black; color: white; font-family: 'Segoe UI'; font-size: 16px; font-variant: small-caps; overflow-x: hidden">
<video id="video" playsinline style="display: none"></video>
<img id="image" src="" style="display: none">
<canvas id="canvas"></canvas>
<div id="samples" style="display: flex; flex-wrap: wrap"></div>
<div id="log" style="position: fixed; bottom: 0">Human library</div>
</body>
</html>

View File

@ -59,7 +59,7 @@ async function detect(input, output) {
image.dispose();
logger.log(result);
// Draw detected data and save processed image
logger.log('Saving:', output);
logger.log('TODO Saving:', output);
}
async function main() {

dist/human-nobundle.cjs (vendored, 5337 lines)
File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

dist/human.cjs (vendored, 9294 lines)
File diff suppressed because one or more lines are too long

dist/human.cjs.json (vendored, new file, 355 lines)

@ -0,0 +1,355 @@
{
"inputs": {
"config.js": {
"bytes": 4569,
"imports": []
},
"package.json": {
"bytes": 2605,
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 2020,
"imports": []
},
"src/facemesh/blazeface.js": {
"bytes": 7042,
"imports": []
},
"src/facemesh/box.js": {
"bytes": 1924,
"imports": []
},
"src/facemesh/facemesh.js": {
"bytes": 2493,
"imports": [
{
"path": "src/facemesh/blazeface.js"
},
{
"path": "src/facemesh/keypoints.js"
},
{
"path": "src/facemesh/pipeline.js"
},
{
"path": "src/facemesh/uvcoords.js"
},
{
"path": "src/facemesh/triangulation.js"
}
]
},
"src/facemesh/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/pipeline.js": {
"bytes": 14108,
"imports": [
{
"path": "src/facemesh/box.js"
},
{
"path": "src/facemesh/keypoints.js"
},
{
"path": "src/facemesh/util.js"
}
]
},
"src/facemesh/triangulation.js": {
"bytes": 12940,
"imports": []
},
"src/facemesh/util.js": {
"bytes": 3078,
"imports": []
},
"src/facemesh/uvcoords.js": {
"bytes": 19592,
"imports": []
},
"src/handpose/box.js": {
"bytes": 2627,
"imports": []
},
"src/handpose/handdetector.js": {
"bytes": 4253,
"imports": [
{
"path": "src/handpose/box.js"
}
]
},
"src/handpose/handpose.js": {
"bytes": 2365,
"imports": [
{
"path": "src/handpose/handdetector.js"
},
{
"path": "src/handpose/keypoints.js"
},
{
"path": "src/handpose/pipeline.js"
}
]
},
"src/handpose/keypoints.js": {
"bytes": 193,
"imports": []
},
"src/handpose/pipeline.js": {
"bytes": 8202,
"imports": [
{
"path": "src/handpose/box.js"
},
{
"path": "src/handpose/util.js"
}
]
},
"src/handpose/util.js": {
"bytes": 2488,
"imports": []
},
"src/index.js": {
"bytes": 6592,
"imports": [
{
"path": "src/facemesh/facemesh.js"
},
{
"path": "src/ssrnet/ssrnet.js"
},
{
"path": "src/emotion/emotion.js"
},
{
"path": "src/posenet/posenet.js"
},
{
"path": "src/handpose/handpose.js"
},
{
"path": "config.js"
},
{
"path": "package.json"
}
]
},
"src/posenet/buildParts.js": {
"bytes": 2035,
"imports": [
{
"path": "src/posenet/heapSort.js"
}
]
},
"src/posenet/decodeMultiple.js": {
"bytes": 5605,
"imports": [
{
"path": "src/posenet/buildParts.js"
},
{
"path": "src/posenet/decodePose.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/decodePose.js": {
"bytes": 4540,
"imports": [
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/heapSort.js": {
"bytes": 1590,
"imports": []
},
"src/posenet/keypoints.js": {
"bytes": 2291,
"imports": []
},
"src/posenet/modelBase.js": {
"bytes": 1719,
"imports": []
},
"src/posenet/modelMobileNet.js": {
"bytes": 593,
"imports": [
{
"path": "src/posenet/modelBase.js"
}
]
},
"src/posenet/modelPoseNet.js": {
"bytes": 3472,
"imports": [
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/posenet.js": {
"bytes": 830,
"imports": [
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/modelPoseNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/util.js": {
"bytes": 4202,
"imports": [
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/posenet/vectors.js": {
"bytes": 1273,
"imports": [
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/ssrnet/ssrnet.js": {
"bytes": 1856,
"imports": []
}
},
"outputs": {
"dist/human.cjs.map": {
"imports": [],
"inputs": {},
"bytes": 216661
},
"dist/human.cjs": {
"imports": [],
"inputs": {
"src/facemesh/blazeface.js": {
"bytesInOutput": 7246
},
"src/facemesh/keypoints.js": {
"bytesInOutput": 2771
},
"src/facemesh/box.js": {
"bytesInOutput": 2083
},
"src/facemesh/util.js": {
"bytesInOutput": 3027
},
"src/facemesh/pipeline.js": {
"bytesInOutput": 13162
},
"src/facemesh/uvcoords.js": {
"bytesInOutput": 20586
},
"src/facemesh/triangulation.js": {
"bytesInOutput": 23311
},
"src/facemesh/facemesh.js": {
"bytesInOutput": 2695
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 2068
},
"src/emotion/emotion.js": {
"bytesInOutput": 2132
},
"src/posenet/modelBase.js": {
"bytesInOutput": 1120
},
"src/posenet/modelMobileNet.js": {
"bytesInOutput": 506
},
"src/posenet/heapSort.js": {
"bytesInOutput": 1639
},
"src/posenet/buildParts.js": {
"bytesInOutput": 1754
},
"src/posenet/keypoints.js": {
"bytesInOutput": 2288
},
"src/posenet/vectors.js": {
"bytesInOutput": 1416
},
"src/posenet/decodePose.js": {
"bytesInOutput": 3775
},
"src/posenet/decodeMultiple.js": {
"bytesInOutput": 1992
},
"src/posenet/util.js": {
"bytesInOutput": 4383
},
"src/posenet/modelPoseNet.js": {
"bytesInOutput": 1974
},
"src/posenet/posenet.js": {
"bytesInOutput": 917
},
"src/handpose/box.js": {
"bytesInOutput": 2813
},
"src/handpose/handdetector.js": {
"bytesInOutput": 4271
},
"src/handpose/keypoints.js": {
"bytesInOutput": 265
},
"src/handpose/util.js": {
"bytesInOutput": 2671
},
"src/handpose/pipeline.js": {
"bytesInOutput": 7651
},
"src/handpose/handpose.js": {
"bytesInOutput": 2516
},
"config.js": {
"bytesInOutput": 1853
},
"package.json": {
"bytesInOutput": 2748
},
"src/index.js": {
"bytesInOutput": 5268
}
},
"bytes": 132235
}
}
}

dist/human.cjs.map (vendored, 6 lines)
File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm-nobundle.json (vendored, new file, 355 lines)

@ -0,0 +1,355 @@
{
"inputs": {
"config.js": {
"bytes": 4569,
"imports": []
},
"package.json": {
"bytes": 2605,
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 2020,
"imports": []
},
"src/facemesh/blazeface.js": {
"bytes": 7042,
"imports": []
},
"src/facemesh/box.js": {
"bytes": 1924,
"imports": []
},
"src/facemesh/facemesh.js": {
"bytes": 2493,
"imports": [
{
"path": "src/facemesh/blazeface.js"
},
{
"path": "src/facemesh/keypoints.js"
},
{
"path": "src/facemesh/pipeline.js"
},
{
"path": "src/facemesh/uvcoords.js"
},
{
"path": "src/facemesh/triangulation.js"
}
]
},
"src/facemesh/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/pipeline.js": {
"bytes": 14108,
"imports": [
{
"path": "src/facemesh/box.js"
},
{
"path": "src/facemesh/keypoints.js"
},
{
"path": "src/facemesh/util.js"
}
]
},
"src/facemesh/triangulation.js": {
"bytes": 12940,
"imports": []
},
"src/facemesh/util.js": {
"bytes": 3078,
"imports": []
},
"src/facemesh/uvcoords.js": {
"bytes": 19592,
"imports": []
},
"src/handpose/box.js": {
"bytes": 2627,
"imports": []
},
"src/handpose/handdetector.js": {
"bytes": 4253,
"imports": [
{
"path": "src/handpose/box.js"
}
]
},
"src/handpose/handpose.js": {
"bytes": 2365,
"imports": [
{
"path": "src/handpose/handdetector.js"
},
{
"path": "src/handpose/keypoints.js"
},
{
"path": "src/handpose/pipeline.js"
}
]
},
"src/handpose/keypoints.js": {
"bytes": 193,
"imports": []
},
"src/handpose/pipeline.js": {
"bytes": 8202,
"imports": [
{
"path": "src/handpose/box.js"
},
{
"path": "src/handpose/util.js"
}
]
},
"src/handpose/util.js": {
"bytes": 2488,
"imports": []
},
"src/index.js": {
"bytes": 6592,
"imports": [
{
"path": "src/facemesh/facemesh.js"
},
{
"path": "src/ssrnet/ssrnet.js"
},
{
"path": "src/emotion/emotion.js"
},
{
"path": "src/posenet/posenet.js"
},
{
"path": "src/handpose/handpose.js"
},
{
"path": "config.js"
},
{
"path": "package.json"
}
]
},
"src/posenet/buildParts.js": {
"bytes": 2035,
"imports": [
{
"path": "src/posenet/heapSort.js"
}
]
},
"src/posenet/decodeMultiple.js": {
"bytes": 5605,
"imports": [
{
"path": "src/posenet/buildParts.js"
},
{
"path": "src/posenet/decodePose.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/decodePose.js": {
"bytes": 4540,
"imports": [
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/heapSort.js": {
"bytes": 1590,
"imports": []
},
"src/posenet/keypoints.js": {
"bytes": 2291,
"imports": []
},
"src/posenet/modelBase.js": {
"bytes": 1719,
"imports": []
},
"src/posenet/modelMobileNet.js": {
"bytes": 593,
"imports": [
{
"path": "src/posenet/modelBase.js"
}
]
},
"src/posenet/modelPoseNet.js": {
"bytes": 3472,
"imports": [
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/posenet.js": {
"bytes": 830,
"imports": [
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/modelPoseNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/util.js": {
"bytes": 4202,
"imports": [
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/posenet/vectors.js": {
"bytes": 1273,
"imports": [
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/ssrnet/ssrnet.js": {
"bytes": 1856,
"imports": []
}
},
"outputs": {
"dist/human.esm-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 194952
},
"dist/human.esm-nobundle.js": {
"imports": [],
"inputs": {
"src/facemesh/blazeface.js": {
"bytesInOutput": 3223
},
"src/facemesh/keypoints.js": {
"bytesInOutput": 1950
},
"src/facemesh/box.js": {
"bytesInOutput": 1026
},
"src/facemesh/util.js": {
"bytesInOutput": 1176
},
"src/facemesh/pipeline.js": {
"bytesInOutput": 5541
},
"src/facemesh/uvcoords.js": {
"bytesInOutput": 16790
},
"src/facemesh/triangulation.js": {
"bytesInOutput": 9995
},
"src/facemesh/facemesh.js": {
"bytesInOutput": 1287
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1099
},
"src/emotion/emotion.js": {
"bytesInOutput": 1148
},
"src/posenet/modelBase.js": {
"bytesInOutput": 597
},
"src/posenet/modelMobileNet.js": {
"bytesInOutput": 272
},
"src/posenet/heapSort.js": {
"bytesInOutput": 1046
},
"src/posenet/buildParts.js": {
"bytesInOutput": 551
},
"src/posenet/keypoints.js": {
"bytesInOutput": 1626
},
"src/posenet/vectors.js": {
"bytesInOutput": 612
},
"src/posenet/decodePose.js": {
"bytesInOutput": 1021
},
"src/posenet/decodeMultiple.js": {
"bytesInOutput": 608
},
"src/posenet/util.js": {
"bytesInOutput": 1843
},
"src/posenet/modelPoseNet.js": {
"bytesInOutput": 885
},
"src/posenet/posenet.js": {
"bytesInOutput": 464
},
"src/handpose/box.js": {
"bytesInOutput": 1400
},
"src/handpose/handdetector.js": {
"bytesInOutput": 2074
},
"src/handpose/keypoints.js": {
"bytesInOutput": 160
},
"src/handpose/util.js": {
"bytesInOutput": 977
},
"src/handpose/pipeline.js": {
"bytesInOutput": 3230
},
"src/handpose/handpose.js": {
"bytesInOutput": 1326
},
"config.js": {
"bytesInOutput": 1136
},
"package.json": {
"bytesInOutput": 2275
},
"src/index.js": {
"bytesInOutput": 2995
}
},
"bytes": 68596
}
}
}

dist/human.esm.js (vendored, 2 lines)
File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.json (vendored, new file, 623 lines)

@ -0,0 +1,623 @@
{
"inputs": {
"config.js": {
"bytes": 4569,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
"bytes": 272720,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
},
{
"path": "node_modules/seedrandom/index.js"
}
]
},
"node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js": {
"bytes": 571410,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
}
]
},
"node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js": {
"bytes": 294510,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
}
]
},
"node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js": {
"bytes": 1305668,
"imports": [
{
"path": "empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js"
},
{
"path": "empty:util"
},
{
"path": "empty:crypto"
}
]
},
"node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js": {
"bytes": 217016,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
},
{
"path": "empty:crypto"
},
{
"path": "empty:/home/vlado/dev/human/node_modules/string_decoder/lib/string_decoder.js"
}
]
},
"node_modules/@tensorflow/tfjs-layers/dist/tf-layers.node.js": {
"bytes": 811045,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
}
]
},
"node_modules/@tensorflow/tfjs/dist/tf.node.js": {
"bytes": 2953,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-layers/dist/tf-layers.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js"
}
]
},
"node_modules/seedrandom/index.js": {
"bytes": 2110,
"imports": [
{
"path": "node_modules/seedrandom/lib/alea.js"
},
{
"path": "node_modules/seedrandom/lib/xor128.js"
},
{
"path": "node_modules/seedrandom/lib/xorwow.js"
},
{
"path": "node_modules/seedrandom/lib/xorshift7.js"
},
{
"path": "node_modules/seedrandom/lib/xor4096.js"
},
{
"path": "node_modules/seedrandom/lib/tychei.js"
},
{
"path": "node_modules/seedrandom/seedrandom.js"
}
]
},
"node_modules/seedrandom/lib/alea.js": {
"bytes": 3243,
"imports": []
},
"node_modules/seedrandom/lib/tychei.js": {
"bytes": 2525,
"imports": []
},
"node_modules/seedrandom/lib/xor128.js": {
"bytes": 1748,
"imports": []
},
"node_modules/seedrandom/lib/xor4096.js": {
"bytes": 4559,
"imports": []
},
"node_modules/seedrandom/lib/xorshift7.js": {
"bytes": 2418,
"imports": []
},
"node_modules/seedrandom/lib/xorwow.js": {
"bytes": 1919,
"imports": []
},
"node_modules/seedrandom/seedrandom.js": {
"bytes": 8358,
"imports": [
{
"path": "empty:crypto"
}
]
},
"package.json": {
"bytes": 2605,
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 2020,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"src/facemesh/blazeface.js": {
"bytes": 7042,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"src/facemesh/box.js": {
"bytes": 1924,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"src/facemesh/facemesh.js": {
"bytes": 2493,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/facemesh/blazeface.js"
},
{
"path": "src/facemesh/keypoints.js"
},
{
"path": "src/facemesh/pipeline.js"
},
{
"path": "src/facemesh/uvcoords.js"
},
{
"path": "src/facemesh/triangulation.js"
}
]
},
"src/facemesh/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/pipeline.js": {
"bytes": 14108,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/facemesh/box.js"
},
{
"path": "src/facemesh/keypoints.js"
},
{
"path": "src/facemesh/util.js"
}
]
},
"src/facemesh/triangulation.js": {
"bytes": 12940,
"imports": []
},
"src/facemesh/util.js": {
"bytes": 3078,
"imports": []
},
"src/facemesh/uvcoords.js": {
"bytes": 19592,
"imports": []
},
"src/handpose/box.js": {
"bytes": 2627,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"src/handpose/handdetector.js": {
"bytes": 4253,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/box.js"
}
]
},
"src/handpose/handpose.js": {
"bytes": 2365,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/handdetector.js"
},
{
"path": "src/handpose/keypoints.js"
},
{
"path": "src/handpose/pipeline.js"
}
]
},
"src/handpose/keypoints.js": {
"bytes": 193,
"imports": []
},
"src/handpose/pipeline.js": {
"bytes": 8202,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/box.js"
},
{
"path": "src/handpose/util.js"
}
]
},
"src/handpose/util.js": {
"bytes": 2488,
"imports": []
},
"src/index.js": {
"bytes": 6592,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/facemesh/facemesh.js"
},
{
"path": "src/ssrnet/ssrnet.js"
},
{
"path": "src/emotion/emotion.js"
},
{
"path": "src/posenet/posenet.js"
},
{
"path": "src/handpose/handpose.js"
},
{
"path": "config.js"
},
{
"path": "package.json"
}
]
},
"src/posenet/buildParts.js": {
"bytes": 2035,
"imports": [
{
"path": "src/posenet/heapSort.js"
}
]
},
"src/posenet/decodeMultiple.js": {
"bytes": 5605,
"imports": [
{
"path": "src/posenet/buildParts.js"
},
{
"path": "src/posenet/decodePose.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/decodePose.js": {
"bytes": 4540,
"imports": [
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/heapSort.js": {
"bytes": 1590,
"imports": []
},
"src/posenet/keypoints.js": {
"bytes": 2291,
"imports": []
},
"src/posenet/modelBase.js": {
"bytes": 1719,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"src/posenet/modelMobileNet.js": {
"bytes": 593,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/posenet/modelBase.js"
}
]
},
"src/posenet/modelPoseNet.js": {
"bytes": 3472,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/posenet.js": {
"bytes": 830,
"imports": [
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/modelPoseNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/util.js": {
"bytes": 4202,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/posenet/vectors.js": {
"bytes": 1273,
"imports": [
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/ssrnet/ssrnet.js": {
"bytes": 1856,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js": {
"bytes": 0,
"imports": []
},
"empty:/home/vlado/dev/human/node_modules/string_decoder/lib/string_decoder.js": {
"bytes": 0,
"imports": []
},
"empty:crypto": {
"bytes": 0,
"imports": []
},
"empty:util": {
"bytes": 0,
"imports": []
}
},
"outputs": {
"dist/human.esm.js.map": {
"imports": [],
"inputs": {},
"bytes": 4956003
},
"dist/human.esm.js": {
"imports": [],
"inputs": {
"empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js": {
"bytesInOutput": 18
},
"empty:util": {
"bytesInOutput": 18
},
"empty:crypto": {
"bytesInOutput": 18
},
"node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js": {
"bytesInOutput": 293678
},
"node_modules/@tensorflow/tfjs-layers/dist/tf-layers.node.js": {
"bytesInOutput": 238720
},
"node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js": {
"bytesInOutput": 108682
},
"empty:/home/vlado/dev/human/node_modules/string_decoder/lib/string_decoder.js": {
"bytesInOutput": 18
},
"node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js": {
"bytesInOutput": 52362
},
"node_modules/seedrandom/lib/alea.js": {
"bytesInOutput": 995
},
"node_modules/seedrandom/lib/xor128.js": {
"bytesInOutput": 760
},
"node_modules/seedrandom/lib/xorwow.js": {
"bytesInOutput": 850
},
"node_modules/seedrandom/lib/xorshift7.js": {
"bytesInOutput": 1006
},
"node_modules/seedrandom/lib/xor4096.js": {
"bytesInOutput": 1169
},
"node_modules/seedrandom/lib/tychei.js": {
"bytesInOutput": 885
},
"node_modules/seedrandom/seedrandom.js": {
"bytesInOutput": 1612
},
"node_modules/seedrandom/index.js": {
"bytesInOutput": 176
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
"bytesInOutput": 75760
},
"node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js": {
"bytesInOutput": 251382
},
"node_modules/@tensorflow/tfjs/dist/tf.node.js": {
"bytesInOutput": 765
},
"src/facemesh/blazeface.js": {
"bytesInOutput": 3238
},
"src/facemesh/keypoints.js": {
"bytesInOutput": 1951
},
"src/facemesh/box.js": {
"bytesInOutput": 1011
},
"src/facemesh/util.js": {
"bytesInOutput": 1195
},
"src/facemesh/pipeline.js": {
"bytesInOutput": 5520
},
"src/facemesh/uvcoords.js": {
"bytesInOutput": 16791
},
"src/facemesh/triangulation.js": {
"bytesInOutput": 9996
},
"src/facemesh/facemesh.js": {
"bytesInOutput": 1273
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1100
},
"src/emotion/emotion.js": {
"bytesInOutput": 1143
},
"src/posenet/modelBase.js": {
"bytesInOutput": 575
},
"src/posenet/modelMobileNet.js": {
"bytesInOutput": 250
},
"src/posenet/heapSort.js": {
"bytesInOutput": 1047
},
"src/posenet/buildParts.js": {
"bytesInOutput": 552
},
"src/posenet/keypoints.js": {
"bytesInOutput": 1638
},
"src/posenet/vectors.js": {
"bytesInOutput": 621
},
"src/posenet/decodePose.js": {
"bytesInOutput": 1029
},
"src/posenet/decodeMultiple.js": {
"bytesInOutput": 609
},
"src/posenet/util.js": {
"bytesInOutput": 1840
},
"src/posenet/modelPoseNet.js": {
"bytesInOutput": 863
},
"src/posenet/posenet.js": {
"bytesInOutput": 479
},
"src/handpose/box.js": {
"bytesInOutput": 1386
},
"src/handpose/handdetector.js": {
"bytesInOutput": 2084
},
"src/handpose/keypoints.js": {
"bytesInOutput": 161
},
"src/handpose/util.js": {
"bytesInOutput": 993
},
"src/handpose/pipeline.js": {
"bytesInOutput": 3228
},
"src/handpose/handpose.js": {
"bytesInOutput": 1312
},
"config.js": {
"bytesInOutput": 1137
},
"package.json": {
"bytesInOutput": 2276
},
"src/index.js": {
"bytesInOutput": 3058
}
},
"bytes": 1105497
}
}
}

dist/human.js (vendored, 2 lines)
File diff suppressed because one or more lines are too long

dist/human.js.map (vendored, 6 lines)
File diff suppressed because one or more lines are too long

dist/human.json (vendored, new file, 623 lines)

@ -0,0 +1,623 @@
{
"inputs": {
"config.js": {
"bytes": 4569,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
"bytes": 272720,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
},
{
"path": "node_modules/seedrandom/index.js"
}
]
},
"node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js": {
"bytes": 571410,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
}
]
},
"node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js": {
"bytes": 294510,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
}
]
},
"node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js": {
"bytes": 1305668,
"imports": [
{
"path": "empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js"
},
{
"path": "empty:util"
},
{
"path": "empty:crypto"
}
]
},
"node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js": {
"bytes": 217016,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
},
{
"path": "empty:crypto"
},
{
"path": "empty:/home/vlado/dev/human/node_modules/string_decoder/lib/string_decoder.js"
}
]
},
"node_modules/@tensorflow/tfjs-layers/dist/tf-layers.node.js": {
"bytes": 811045,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
}
]
},
"node_modules/@tensorflow/tfjs/dist/tf.node.js": {
"bytes": 2953,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-layers/dist/tf-layers.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js"
}
]
},
"node_modules/seedrandom/index.js": {
"bytes": 2110,
"imports": [
{
"path": "node_modules/seedrandom/lib/alea.js"
},
{
"path": "node_modules/seedrandom/lib/xor128.js"
},
{
"path": "node_modules/seedrandom/lib/xorwow.js"
},
{
"path": "node_modules/seedrandom/lib/xorshift7.js"
},
{
"path": "node_modules/seedrandom/lib/xor4096.js"
},
{
"path": "node_modules/seedrandom/lib/tychei.js"
},
{
"path": "node_modules/seedrandom/seedrandom.js"
}
]
},
"node_modules/seedrandom/lib/alea.js": {
"bytes": 3243,
"imports": []
},
"node_modules/seedrandom/lib/tychei.js": {
"bytes": 2525,
"imports": []
},
"node_modules/seedrandom/lib/xor128.js": {
"bytes": 1748,
"imports": []
},
"node_modules/seedrandom/lib/xor4096.js": {
"bytes": 4559,
"imports": []
},
"node_modules/seedrandom/lib/xorshift7.js": {
"bytes": 2418,
"imports": []
},
"node_modules/seedrandom/lib/xorwow.js": {
"bytes": 1919,
"imports": []
},
"node_modules/seedrandom/seedrandom.js": {
"bytes": 8358,
"imports": [
{
"path": "empty:crypto"
}
]
},
"package.json": {
"bytes": 2605,
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 2020,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"src/facemesh/blazeface.js": {
"bytes": 7042,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"src/facemesh/box.js": {
"bytes": 1924,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"src/facemesh/facemesh.js": {
"bytes": 2493,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/facemesh/blazeface.js"
},
{
"path": "src/facemesh/keypoints.js"
},
{
"path": "src/facemesh/pipeline.js"
},
{
"path": "src/facemesh/uvcoords.js"
},
{
"path": "src/facemesh/triangulation.js"
}
]
},
"src/facemesh/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/pipeline.js": {
"bytes": 14108,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/facemesh/box.js"
},
{
"path": "src/facemesh/keypoints.js"
},
{
"path": "src/facemesh/util.js"
}
]
},
"src/facemesh/triangulation.js": {
"bytes": 12940,
"imports": []
},
"src/facemesh/util.js": {
"bytes": 3078,
"imports": []
},
"src/facemesh/uvcoords.js": {
"bytes": 19592,
"imports": []
},
"src/handpose/box.js": {
"bytes": 2627,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"src/handpose/handdetector.js": {
"bytes": 4253,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/box.js"
}
]
},
"src/handpose/handpose.js": {
"bytes": 2365,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/handdetector.js"
},
{
"path": "src/handpose/keypoints.js"
},
{
"path": "src/handpose/pipeline.js"
}
]
},
"src/handpose/keypoints.js": {
"bytes": 193,
"imports": []
},
"src/handpose/pipeline.js": {
"bytes": 8202,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/box.js"
},
{
"path": "src/handpose/util.js"
}
]
},
"src/handpose/util.js": {
"bytes": 2488,
"imports": []
},
"src/index.js": {
"bytes": 6592,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/facemesh/facemesh.js"
},
{
"path": "src/ssrnet/ssrnet.js"
},
{
"path": "src/emotion/emotion.js"
},
{
"path": "src/posenet/posenet.js"
},
{
"path": "src/handpose/handpose.js"
},
{
"path": "config.js"
},
{
"path": "package.json"
}
]
},
"src/posenet/buildParts.js": {
"bytes": 2035,
"imports": [
{
"path": "src/posenet/heapSort.js"
}
]
},
"src/posenet/decodeMultiple.js": {
"bytes": 5605,
"imports": [
{
"path": "src/posenet/buildParts.js"
},
{
"path": "src/posenet/decodePose.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/decodePose.js": {
"bytes": 4540,
"imports": [
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/heapSort.js": {
"bytes": 1590,
"imports": []
},
"src/posenet/keypoints.js": {
"bytes": 2291,
"imports": []
},
"src/posenet/modelBase.js": {
"bytes": 1719,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"src/posenet/modelMobileNet.js": {
"bytes": 593,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/posenet/modelBase.js"
}
]
},
"src/posenet/modelPoseNet.js": {
"bytes": 3472,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/posenet.js": {
"bytes": 830,
"imports": [
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/modelPoseNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/util.js": {
"bytes": 4202,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/posenet/vectors.js": {
"bytes": 1273,
"imports": [
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/ssrnet/ssrnet.js": {
"bytes": 1856,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js": {
"bytes": 0,
"imports": []
},
"empty:/home/vlado/dev/human/node_modules/string_decoder/lib/string_decoder.js": {
"bytes": 0,
"imports": []
},
"empty:crypto": {
"bytes": 0,
"imports": []
},
"empty:util": {
"bytes": 0,
"imports": []
}
},
"outputs": {
"dist/human.js.map": {
"imports": [],
"inputs": {},
"bytes": 4956003
},
"dist/human.js": {
"imports": [],
"inputs": {
"empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js": {
"bytesInOutput": 18
},
"empty:util": {
"bytesInOutput": 18
},
"empty:crypto": {
"bytesInOutput": 18
},
"node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js": {
"bytesInOutput": 293678
},
"node_modules/@tensorflow/tfjs-layers/dist/tf-layers.node.js": {
"bytesInOutput": 238720
},
"node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js": {
"bytesInOutput": 108682
},
"empty:/home/vlado/dev/human/node_modules/string_decoder/lib/string_decoder.js": {
"bytesInOutput": 18
},
"node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js": {
"bytesInOutput": 52362
},
"node_modules/seedrandom/lib/alea.js": {
"bytesInOutput": 995
},
"node_modules/seedrandom/lib/xor128.js": {
"bytesInOutput": 760
},
"node_modules/seedrandom/lib/xorwow.js": {
"bytesInOutput": 850
},
"node_modules/seedrandom/lib/xorshift7.js": {
"bytesInOutput": 1006
},
"node_modules/seedrandom/lib/xor4096.js": {
"bytesInOutput": 1169
},
"node_modules/seedrandom/lib/tychei.js": {
"bytesInOutput": 885
},
"node_modules/seedrandom/seedrandom.js": {
"bytesInOutput": 1612
},
"node_modules/seedrandom/index.js": {
"bytesInOutput": 176
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
"bytesInOutput": 75760
},
"node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js": {
"bytesInOutput": 251382
},
"node_modules/@tensorflow/tfjs/dist/tf.node.js": {
"bytesInOutput": 765
},
"src/facemesh/blazeface.js": {
"bytesInOutput": 3238
},
"src/facemesh/keypoints.js": {
"bytesInOutput": 1951
},
"src/facemesh/box.js": {
"bytesInOutput": 1011
},
"src/facemesh/util.js": {
"bytesInOutput": 1195
},
"src/facemesh/pipeline.js": {
"bytesInOutput": 5520
},
"src/facemesh/uvcoords.js": {
"bytesInOutput": 16791
},
"src/facemesh/triangulation.js": {
"bytesInOutput": 9996
},
"src/facemesh/facemesh.js": {
"bytesInOutput": 1273
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1100
},
"src/emotion/emotion.js": {
"bytesInOutput": 1143
},
"src/posenet/modelBase.js": {
"bytesInOutput": 575
},
"src/posenet/modelMobileNet.js": {
"bytesInOutput": 250
},
"src/posenet/heapSort.js": {
"bytesInOutput": 1047
},
"src/posenet/buildParts.js": {
"bytesInOutput": 552
},
"src/posenet/keypoints.js": {
"bytesInOutput": 1638
},
"src/posenet/vectors.js": {
"bytesInOutput": 621
},
"src/posenet/decodePose.js": {
"bytesInOutput": 1029
},
"src/posenet/decodeMultiple.js": {
"bytesInOutput": 609
},
"src/posenet/util.js": {
"bytesInOutput": 1840
},
"src/posenet/modelPoseNet.js": {
"bytesInOutput": 863
},
"src/posenet/posenet.js": {
"bytesInOutput": 479
},
"src/handpose/box.js": {
"bytesInOutput": 1386
},
"src/handpose/handdetector.js": {
"bytesInOutput": 2084
},
"src/handpose/keypoints.js": {
"bytesInOutput": 161
},
"src/handpose/util.js": {
"bytesInOutput": 993
},
"src/handpose/pipeline.js": {
"bytesInOutput": 3228
},
"src/handpose/handpose.js": {
"bytesInOutput": 1312
},
"config.js": {
"bytesInOutput": 1137
},
"package.json": {
"bytesInOutput": 2276
},
"src/index.js": {
"bytesInOutput": 3058
}
},
"bytes": 1105506
}
}
}

favicon.ico (new binary file, 4.9 KiB; binary file not shown)

View File

@ -3,7 +3,7 @@
"version": "0.3.5",
"description": "human: 3D Face Detection, Iris Tracking and Age & Gender Prediction",
"sideEffects": false,
"main": "dist/human-nobundle.cjs",
"main": "dist/human.cjs",
"module": "dist/human.esm.js",
"browser": "dist/human.esm.js",
"author": "Vladimir Mandic <mandic00@live.com>",
@ -37,14 +37,13 @@
"rimraf": "^3.0.2"
},
"scripts": {
"start": "node --trace-warnings --trace-uncaught --no-deprecation demo/demo-node.js",
"start": "node --trace-warnings --trace-uncaught --no-deprecation demo/node.js",
"lint": "eslint src/*.js demo/*.js",
"build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --external:fs --global-name=human --outfile=dist/human.js src/index.js",
"build-esm-bundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:fs --outfile=dist/human.esm.js src/index.js",
"build-esm-nobundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:@tensorflow --external:fs --outfile=dist/human.esm-nobundle.js src/index.js",
"build-node-bundle": "esbuild --bundle --platform=node --sourcemap --target=esnext --format=cjs --minify --outfile=dist/human.cjs src/index.js",
"build-node-nobundle": "esbuild --bundle --platform=node --sourcemap --target=esnext --format=cjs --external:@tensorflow --outfile=dist/human-nobundle.cjs src/index.js",
"build": "rimraf dist/* && npm run build-iife && npm run build-esm-bundle && npm run build-esm-nobundle && npm run build-node-bundle && npm run build-node-nobundle && ls -l dist/",
"build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --external:fs --global-name=human --metafile=dist/human.json --outfile=dist/human.js src/index.js",
"build-esm-bundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:fs --metafile=dist/human.esm.json --outfile=dist/human.esm.js src/index.js",
"build-esm-nobundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:@tensorflow --external:fs --metafile=dist/human.esm-nobundle.json --outfile=dist/human.esm-nobundle.js src/index.js",
"build-node": "esbuild --bundle --platform=node --sourcemap --target=esnext --format=cjs --external:@tensorflow --metafile=dist/human.cjs.json --outfile=dist/human.cjs src/index.js",
"build": "rimraf dist/* && npm run build-iife && npm run build-esm-bundle && npm run build-esm-nobundle && npm run build-node && ls -l dist/",
"update": "npm update --depth 20 && npm dedupe && npm prune && npm audit",
"changelog": "node changelog.js"
},
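The `--metafile` flags added above are what produce the `dist/*.json` files shown earlier in this commit; a short sketch of summarizing one of them (script name and output format are assumptions):
```js
// summarize an esbuild metafile produced by the --metafile flags above
// usage sketch: node metafile-summary.js dist/human.json
const fs = require('fs');

const meta = JSON.parse(fs.readFileSync(process.argv[2] || 'dist/human.json', 'utf8'));
for (const [output, info] of Object.entries(meta.outputs)) {
  console.log(`${output}: ${info.bytes.toLocaleString()} bytes`);
  const inputs = Object.entries(info.inputs)
    .sort((a, b) => b[1].bytesInOutput - a[1].bytesInOutput)
    .slice(0, 5); // five largest contributors to this output
  for (const [input, { bytesInOutput }] of inputs) {
    console.log(`  ${input}: ${bytesInOutput.toLocaleString()} bytes`);
  }
}
```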

View File

@ -4,7 +4,7 @@ const ssrnet = require('./ssrnet/ssrnet.js');
const emotion = require('./emotion/emotion.js');
const posenet = require('./posenet/posenet.js');
const handpose = require('./handpose/handpose.js');
const defaults = require('./config.js').default;
const defaults = require('../config.js').default;
const app = require('../package.json');
let config;