mirror of https://github.com/vladmandic/human

implemented multi-hand support

commit ded889484b
parent 04b283db32
@@ -49,6 +49,8 @@
    "promise/catch-or-return": "off",
    "promise/no-nesting": "off",
    "import/no-absolute-path": "off",
    "import/no-extraneous-dependencies": "off",
    "node/no-unpublished-require": "off",
    "no-regex-spaces": "off",
    "radix": "off"
  }
README.md (76 lines changed)
@@ -1,11 +1,14 @@
  # Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking and Age & Gender Prediction

- **Documentation**: <https://github.com/vladmandic/human#readme>
- **Code Repository**: <https://github.com/vladmandic/human>
- **Package**: <https://www.npmjs.com/package/@vladmandic/human>
- **Live Demo**: <https://vladmandic.github.io/human/demo/demo-esm.html>
+ - [**Documentation**](https://github.com/vladmandic/human#readme)
+ - [**Code Repository**](https://github.com/vladmandic/human)
+ - [**Package**](https://www.npmjs.com/package/@vladmandic/human)
+ - [**Issues Tracker**](https://github.com/vladmandic/human/issues)
+ - [**Live Demo**](https://vladmandic.github.io/human/demo/demo-esm.html)

- Compatible with Browser, WebWorker and NodeJS** execution!
+ Compatible with Browser, WebWorker and NodeJS execution!

  *This is a pre-release project, see [issues](https://github.com/vladmandic/human/issues) for a list of known limitations*

  *Suggestions are welcome!*

@@ -47,7 +50,7 @@ There are multiple ways to use `Human` library, pick one that suits you:
  Simply download `dist/human.js`, include it in your `HTML` file & it's ready to use.

  ```html
  <script src="dist/human.js"></script>
  ```

  IIFE script auto-registers global namespace `human` within the global `Window` object
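
As a minimal sketch of that IIFE usage (the element id here is illustrative, not part of the library):

```html
<script src="dist/human.js"></script>
<script>
  // the IIFE bundle registers `human` on the global Window object
  const img = document.getElementById('input'); // hypothetical image element
  human.detect(img).then((result) => console.log(result));
</script>
```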

@@ -64,9 +67,17 @@ IIFE script is distributed in minified form with attached sourcemap
  If you're using a bundler *(such as rollup, webpack, esbuild)* to package your client application, you can import the ESM version of the `Human` library, which supports full tree shaking

  ```js
- import human from 'dist/human.esm.js';
+ import human from '@vladmandic/human'; // points to @vladmandic/human/dist/human.esm.js
  ```

+ Or if you prefer to package your own version of `tfjs`, you can use the `nobundle` version
+
+ ```js
+ import tf from '@tensorflow/tfjs';
+ import human from '@vladmandic/human/dist/human.nobundle.js'; // same functionality as default import, but without tfjs bundled
+ ```

  #### 2.2 Using Script Module
  You could use the same syntax within your main `JS` file if it's imported with `<script type="module">`
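
For instance (a sketch; the relative path depends on where you serve the bundle from):

```html
<script type="module">
  import human from './dist/human.esm.js';
  human.detect(document.getElementById('input')).then((result) => console.log(result));
</script>
```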

@@ -94,11 +105,26 @@ Install with:
  ```

  And then use with:

  ```js
  const tf = require('@tensorflow/tfjs-node');
- const human = require('@vladmandic/human');
+ const human = require('@vladmandic/human'); // points to @vladmandic/human/dist/human.node.js
  ```

  *See limitations for NodeJS usage under `demo`*

+ Since NodeJS projects load `weights` from the local filesystem instead of using `http` calls, you must modify the default configuration to include correct paths with the `file://` prefix.
+ For example:
+
+ ```js
+ const config = {
+   body: { enabled: true, modelPath: 'file://models/posenet/model.json' },
+ }
+ ```
+
+ Note that when using `Human` in NodeJS, you must load and parse the image *before* you pass it for detection.
+ For example:
+
+ ```js
+ const buffer = fs.readFileSync(input);
+ const image = tf.node.decodeImage(buffer);
+ const result = await human.detect(image, config);
+ image.dispose();
+ ```

  ### Weights

@@ -122,10 +148,6 @@ NodeJS:
  - `demo-node`: Demo using NodeJS with CJS module

- This is a very simple demo: although the `Human` library is compatible with NodeJS execution
- and is able to load images and models from the local filesystem,
- the `tfjs-node` backend does not implement functions required for execution of some models

  Currently only body pose detection works while face and hand models are not supported
  See `tfjs-node` issue <https://github.com/tensorflow/tfjs/issues/4066> for details

  <hr>

@@ -137,20 +159,28 @@ All configuration is done in a single JSON object and all model weights will be
  There is only *ONE* method you need:

  ```js
  import * as tf from '@tensorflow/tfjs';
  import human from '@vladmandic/human';

  // 'image': can be any type of image object: HTMLImage, HTMLVideo, HTMLMedia, Canvas, Tensor4D
  // 'options': optional parameter used to override any options present in the default configuration
  const result = await human.detect(image, options?)
  ```
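
For example, a sketch of a per-call override (the keys mirror the default configuration object; `image` is assumed to be one of the input types listed above):

```js
// disable face detection for this single call; all other defaults stay intact
const result = await human.detect(image, { face: { enabled: false } });
```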

  or if you want to use promises

  ```js
  human.detect(image, options?).then((result) => {
    // your code
  })
  ```

  Additionally, `Human` library exposes several classes:

  ```js
  human.defaults // default configuration object
  human.models   // dynamically maintained object of any loaded models
  human.tf       // instance of tfjs used by human
  ```

  <hr>

@@ -299,7 +329,5 @@ Library can also be used on mobile devices

  ## Todo

  - Improve detection of smaller faces
  - Tweak default parameters
  - Verify age/gender models
- - Make it work with multiple hands
@@ -10,15 +10,15 @@ const ui = {

  const config = {
    face: {
-     enabled: true,
-     detector: { maxFaces: 10, skipFrames: 5, minConfidence: 0.8, iouThreshold: 0.3, scoreThreshold: 0.75 },
+     enabled: false,
+     detector: { maxFaces: 10, skipFrames: 10, minConfidence: 0.5, iouThreshold: 0.3, scoreThreshold: 0.7 },
      mesh: { enabled: true },
      iris: { enabled: true },
-     age: { enabled: true, skipFrames: 5 },
+     age: { enabled: true, skipFrames: 10 },
      gender: { enabled: true },
    },
-   body: { enabled: true, maxDetections: 5, scoreThreshold: 0.75, nmsRadius: 20 },
-   hand: { enabled: true, skipFrames: 5, minConfidence: 0.8, iouThreshold: 0.3, scoreThreshold: 0.75 },
+   body: { enabled: false, maxDetections: 10, scoreThreshold: 0.7, nmsRadius: 20 },
+   hand: { enabled: true, skipFrames: 10, minConfidence: 0.5, iouThreshold: 0.3, scoreThreshold: 0.7 },
  };
  let settings;
@@ -1,7 +1,7 @@
- const tf = require('@tensorflow/tfjs-node');
  const fs = require('fs');
  const process = require('process');
  const console = require('console');
+ const tf = require('@tensorflow/tfjs-node');
  const human = require('..'); // this would be '@vladmandic/human'

  const logger = new console.Console({

@@ -54,6 +54,7 @@ async function detect(input, output) {
  const image = tf.node.decodeImage(buffer);
+ logger.log('Processing:', image.shape);
  const result = await human.detect(image, config);
  image.dispose();
  logger.log(result);
  // Draw detected data and save processed image
  logger.log('Saving:', output);
@@ -4,19 +4,3 @@ onmessage = async (msg) => {
    const result = await human.detect(msg.data.image, msg.data.config);
    postMessage(result);
  };

- /*
-   web workers are finicky:
-   - cannot pass an HTMLImage or HTMLVideo to a web worker, so we need to pass a canvas instead
-   - canvases can execute transferControlToOffscreen() and then become an offscreenCanvas which can be passed to a worker, but...
-     you cannot transfer a canvas that has a rendering context (basically, the first time you execute getContext() on it)
-
-   which means that if we pass the main canvas that results will be rendered on,
-   then all operations on it must happen within the web worker and we cannot touch it in the main thread at all.
-   doable, but... how do we paint a video frame on it before we pass it?
-
-   and if we instead create a new offscreenCanvas that we draw the video frame on, pass its imageData, and return results from the worker,
-   then there is the overhead of creating it and it ends up being slower than executing in the main thread
- */
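
A minimal sketch of the approach that comment settles on — draw the current video frame to a canvas in the main thread and pass its `ImageData` to the worker (the element ids and worker file path are illustrative):

```js
// main thread: capture a frame and hand it to the worker as ImageData
const video = document.getElementById('video');
const canvas = document.getElementById('canvas');
const ctx = canvas.getContext('2d');
ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
const image = ctx.getImageData(0, 0, canvas.width, canvas.height);
const worker = new Worker('demo/worker.js'); // hypothetical path to the worker above
worker.postMessage({ image, config });       // matches the msg.data.image / msg.data.config reads above
worker.onmessage = (msg) => console.log(msg.data); // detection result
```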

File diff suppressed because one or more lines are too long (6 files)
@@ -3814,7 +3814,7 @@ var require_triangulation = __commonJS((exports2) => {
    ];
  });

- // src/facemesh/index.js
+ // src/facemesh/facemesh.js
  var require_facemesh = __commonJS((exports2) => {
    const tf2 = require("@tensorflow/tfjs");
    const blazeface = require_blazeface();

@@ -3880,7 +3880,7 @@ var require_facemesh = __commonJS((exports2) => {
    exports2.triangulation = triangulation;
  });

- // src/ssrnet/index.js
+ // src/ssrnet/ssrnet.js
  var require_ssrnet = __commonJS((exports2) => {
    const tf2 = require("@tensorflow/tfjs");
    const models2 = {};
@@ -4339,97 +4339,6 @@ var require_decodeMultiple = __commonJS((exports2) => {
    exports2.decodeMultiplePoses = decodeMultiplePoses;
  });

- // src/posenet/decoders.js
- var require_decoders = __commonJS((exports2) => {
-   const tf2 = require("@tensorflow/tfjs");
-   const kpt = require_keypoints2();
-   function getPointsConfidence(heatmapScores, heatMapCoords) {
-     const numKeypoints = heatMapCoords.shape[0];
-     const result = new Float32Array(numKeypoints);
-     for (let keypoint = 0; keypoint < numKeypoints; keypoint++) {
-       const y = heatMapCoords.get(keypoint, 0);
-       const x = heatMapCoords.get(keypoint, 1);
-       result[keypoint] = heatmapScores.get(y, x, keypoint);
-     }
-     return result;
-   }
-   exports2.getPointsConfidence = getPointsConfidence;
-   function getOffsetPoint(y, x, keypoint, offsetsBuffer) {
-     return {
-       y: offsetsBuffer.get(y, x, keypoint),
-       x: offsetsBuffer.get(y, x, keypoint + kpt.NUM_KEYPOINTS)
-     };
-   }
-   function getOffsetVectors(heatMapCoordsBuffer, offsetsBuffer) {
-     const result = [];
-     for (let keypoint = 0; keypoint < kpt.NUM_KEYPOINTS; keypoint++) {
-       const heatmapY = heatMapCoordsBuffer.get(keypoint, 0).valueOf();
-       const heatmapX = heatMapCoordsBuffer.get(keypoint, 1).valueOf();
-       const {x, y} = getOffsetPoint(heatmapY, heatmapX, keypoint, offsetsBuffer);
-       result.push(y);
-       result.push(x);
-     }
-     return tf2.tensor2d(result, [kpt.NUM_KEYPOINTS, 2]);
-   }
-   exports2.getOffsetVectors = getOffsetVectors;
-   function getOffsetPoints(heatMapCoordsBuffer, outputStride, offsetsBuffer) {
-     return tf2.tidy(() => {
-       const offsetVectors = getOffsetVectors(heatMapCoordsBuffer, offsetsBuffer);
-       return heatMapCoordsBuffer.toTensor().mul(tf2.scalar(outputStride, "int32")).toFloat().add(offsetVectors);
-     });
-   }
-   exports2.getOffsetPoints = getOffsetPoints;
-   function mod(a, b) {
-     return tf2.tidy(() => {
-       const floored = a.div(tf2.scalar(b, "int32"));
-       return a.sub(floored.mul(tf2.scalar(b, "int32")));
-     });
-   }
-   function argmax2d(inputs) {
-     const [height, width, depth] = inputs.shape;
-     return tf2.tidy(() => {
-       const reshaped = inputs.reshape([height * width, depth]);
-       const coords = reshaped.argMax(0);
-       const yCoords = coords.div(tf2.scalar(width, "int32")).expandDims(1);
-       const xCoords = mod(coords, width).expandDims(1);
-       return tf2.concat([yCoords, xCoords], 1);
-     });
-   }
-   exports2.argmax2d = argmax2d;
- });
-
- // src/posenet/decodeSingle.js
- var require_decodeSingle = __commonJS((exports2) => {
-   const kpt = require_keypoints2();
-   const decoders = require_decoders();
-   async function decodeSinglePose(heatmapScores, offsets, outputStride) {
-     let totalScore = 0;
-     const heatmapValues = decoders.argmax2d(heatmapScores);
-     const allTensorBuffers = await Promise.all([heatmapScores.buffer(), offsets.buffer(), heatmapValues.buffer()]);
-     const scoresBuffer = allTensorBuffers[0];
-     const offsetsBuffer = allTensorBuffers[1];
-     const heatmapValuesBuffer = allTensorBuffers[2];
-     const offsetPoints = decoders.getOffsetPoints(heatmapValuesBuffer, outputStride, offsetsBuffer);
-     const offsetPointsBuffer = await offsetPoints.buffer();
-     const keypointConfidence = Array.from(decoders.getPointsConfidence(scoresBuffer, heatmapValuesBuffer));
-     const keypoints = keypointConfidence.map((score, keypointId) => {
-       totalScore += score;
-       return {
-         position: {
-           y: offsetPointsBuffer.get(keypointId, 0),
-           x: offsetPointsBuffer.get(keypointId, 1)
-         },
-         part: kpt.partNames[keypointId],
-         score
-       };
-     });
-     heatmapValues.dispose();
-     offsetPoints.dispose();
-     return {keypoints, score: totalScore / keypoints.length};
-   }
-   exports2.decodeSinglePose = decodeSinglePose;
- });

  // src/posenet/util.js
  var require_util2 = __commonJS((exports2) => {
    const tf2 = require("@tensorflow/tfjs");
@@ -4545,14 +4454,13 @@ var require_modelPoseNet = __commonJS((exports2) => {
    const tf2 = require("@tensorflow/tfjs");
    const modelMobileNet = require_modelMobileNet();
    const decodeMultiple = require_decodeMultiple();
-   const decodeSingle = require_decodeSingle();
    const util = require_util2();
    class PoseNet {
      constructor(net, inputResolution) {
        this.baseModel = net;
        this.inputResolution = inputResolution;
      }
-     async estimateMultiplePoses(input, config) {
+     async estimatePoses(input, config) {
        const outputStride = this.baseModel.outputStride;
        const inputResolution = this.inputResolution;
        const [height, width] = util.getInputTensorDimensions(input);

@@ -4572,22 +4480,6 @@ var require_modelPoseNet = __commonJS((exports2) => {
        resized.dispose();
        return resultPoses;
      }
-     async estimateSinglePose(input) {
-       const outputStride = this.baseModel.outputStride;
-       const inputResolution = this.inputResolution;
-       const [height, width] = util.getInputTensorDimensions(input);
-       const {resized, padding} = util.padAndResizeTo(input, inputResolution);
-       const {heatmapScores, offsets, displacementFwd, displacementBwd} = this.baseModel.predict(resized);
-       const pose = await decodeSingle.decodeSinglePose(heatmapScores, offsets, outputStride);
-       const poses = [pose];
-       const resultPoses = util.scaleAndFlipPoses(poses, [height, width], [inputResolution, inputResolution], padding);
-       heatmapScores.dispose();
-       offsets.dispose();
-       displacementFwd.dispose();
-       displacementBwd.dispose();
-       resized.dispose();
-       return resultPoses[0];
-     }
      dispose() {
        this.baseModel.dispose();
      }
@@ -4605,19 +4497,17 @@ var require_modelPoseNet = __commonJS((exports2) => {
    exports2.load = load;
  });

- // src/posenet/index.js
+ // src/posenet/posenet.js
  var require_posenet = __commonJS((exports2) => {
    const modelMobileNet = require_modelMobileNet();
    const modelPoseNet = require_modelPoseNet();
    const decodeMultiple = require_decodeMultiple();
-   const decodeSingle = require_decodeSingle();
    const keypoints = require_keypoints2();
    const util = require_util2();
    exports2.load = modelPoseNet.load;
    exports2.PoseNet = modelPoseNet.PoseNet;
    exports2.MobileNet = modelMobileNet.MobileNet;
    exports2.decodeMultiplePoses = decodeMultiple.decodeMultiplePoses;
-   exports2.decodeSinglePose = decodeSingle.decodeSinglePose;
    exports2.partChannels = keypoints.partChannels;
    exports2.partIds = keypoints.partIds;
    exports2.partNames = keypoints.partNames;
@@ -4700,17 +4590,18 @@ var require_box2 = __commonJS((exports2) => {
    exports2.shiftBox = shiftBox;
  });

- // src/handpose/hand.js
- var require_hand = __commonJS((exports2) => {
+ // src/handpose/handdetector.js
+ var require_handdetector = __commonJS((exports2) => {
    const tf2 = require("@tensorflow/tfjs");
    const bounding = require_box2();
    class HandDetector {
-     constructor(model, width, height, anchors, iouThreshold, scoreThreshold) {
+     constructor(model, width, height, anchors, iouThreshold, scoreThreshold, maxHands) {
        this.model = model;
        this.width = width;
        this.height = height;
        this.iouThreshold = iouThreshold;
        this.scoreThreshold = scoreThreshold;
+       this.maxHands = maxHands;
        this.anchors = anchors.map((anchor) => [anchor.x_center, anchor.y_center]);
        this.anchorsTensor = tf2.tensor2d(this.anchors);
        this.inputSizeTensor = tf2.tensor1d([width, height]);
@@ -4735,20 +4626,12 @@ var require_hand = __commonJS((exports2) => {
      }
      async getBoundingBoxes(input) {
        const normalizedInput = tf2.tidy(() => tf2.mul(tf2.sub(input, 0.5), 2));
-       let batchedPrediction;
-       if (tf2.getBackend() === "webgl") {
-         const savedWebglPackDepthwiseConvFlag = tf2.env().get("WEBGL_PACK_DEPTHWISECONV");
-         tf2.env().set("WEBGL_PACK_DEPTHWISECONV", true);
-         batchedPrediction = this.model.predict(normalizedInput);
-         tf2.env().set("WEBGL_PACK_DEPTHWISECONV", savedWebglPackDepthwiseConvFlag);
-       } else {
-         batchedPrediction = this.model.predict(normalizedInput);
-       }
+       const batchedPrediction = this.model.predict(normalizedInput);
        const prediction = batchedPrediction.squeeze();
        const scores = tf2.tidy(() => tf2.sigmoid(tf2.slice(prediction, [0, 0], [-1, 1])).squeeze());
        const rawBoxes = tf2.slice(prediction, [0, 1], [-1, 4]);
        const boxes = this.normalizeBoxes(rawBoxes);
-       const boxesWithHandsTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, 1, this.iouThreshold, this.scoreThreshold);
+       const boxesWithHandsTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.maxHands, this.iouThreshold, this.scoreThreshold);
        const boxesWithHands = await boxesWithHandsTensor.array();
        const toDispose = [
          normalizedInput,
@@ -4763,34 +4646,39 @@ var require_hand = __commonJS((exports2) => {
          toDispose.forEach((tensor) => tensor.dispose());
          return null;
        }
-       const boxIndex = boxesWithHands[0];
-       const matchingBox = tf2.slice(boxes, [boxIndex, 0], [1, -1]);
-       const rawPalmLandmarks = tf2.slice(prediction, [boxIndex, 5], [1, 14]);
-       const palmLandmarks = tf2.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([
-         -1,
-         2
-       ]));
-       toDispose.push(rawPalmLandmarks);
-       toDispose.forEach((tensor) => tensor.dispose());
-       return {boxes: matchingBox, palmLandmarks};
+       const detectedHands = tf2.tidy(() => {
+         const detectedBoxes = [];
+         for (const i in boxesWithHands) {
+           const boxIndex = boxesWithHands[i];
+           const matchingBox = tf2.slice(boxes, [boxIndex, 0], [1, -1]);
+           const rawPalmLandmarks = tf2.slice(prediction, [boxIndex, 5], [1, 14]);
+           const palmLandmarks = tf2.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));
+           detectedBoxes.push({boxes: matchingBox, palmLandmarks});
+         }
+         return detectedBoxes;
+       });
+       return detectedHands;
      }
      async estimateHandBounds(input) {
        const inputHeight = input.shape[1];
        const inputWidth = input.shape[2];
        const image = tf2.tidy(() => input.resizeBilinear([this.width, this.height]).div(255));
-       const prediction = await this.getBoundingBoxes(image);
-       if (prediction === null) {
-         image.dispose();
-         return null;
-       }
-       const boundingBoxes = await prediction.boxes.array();
-       const startPoint = boundingBoxes[0].slice(0, 2);
-       const endPoint = boundingBoxes[0].slice(2, 4);
-       const palmLandmarks = await prediction.palmLandmarks.array();
+       const predictions = await this.getBoundingBoxes(image);
        image.dispose();
-       prediction.boxes.dispose();
-       prediction.palmLandmarks.dispose();
-       return bounding.scaleBoxCoordinates({startPoint, endPoint, palmLandmarks}, [inputWidth / this.width, inputHeight / this.height]);
+       if (!predictions || predictions.length === 0)
+         return null;
+       const hands = [];
+       for (const i in predictions) {
+         const prediction = predictions[i];
+         const boundingBoxes = await prediction.boxes.array();
+         const startPoint = boundingBoxes[0].slice(0, 2);
+         const endPoint = boundingBoxes[0].slice(2, 4);
+         const palmLandmarks = await prediction.palmLandmarks.array();
+         prediction.boxes.dispose();
+         prediction.palmLandmarks.dispose();
+         hands.push(bounding.scaleBoxCoordinates({startPoint, endPoint, palmLandmarks}, [inputWidth / this.width, inputHeight / this.height]));
+       }
+       return hands;
      }
    }
    exports2.HandDetector = HandDetector;

@@ -4894,13 +4782,14 @@ var require_pipeline2 = __commonJS((exports2) => {
    const PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;
    const PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;
    class HandPipeline {
-     constructor(boundingBoxDetector, meshDetector, meshWidth, meshHeight, maxContinuousChecks, detectionConfidence) {
+     constructor(boundingBoxDetector, meshDetector, meshWidth, meshHeight, maxContinuousChecks, detectionConfidence, maxHands) {
        this.regionsOfInterest = [];
        this.runsWithoutHandDetector = 0;
        this.boundingBoxDetector = boundingBoxDetector;
        this.meshDetector = meshDetector;
        this.maxContinuousChecks = maxContinuousChecks;
        this.detectionConfidence = detectionConfidence;
+       this.maxHands = maxHands;
        this.meshWidth = meshWidth;
        this.meshHeight = meshHeight;
        this.maxHandsNumber = 1;

@@ -4951,62 +4840,60 @@ var require_pipeline2 = __commonJS((exports2) => {
      async estimateHand(image, config) {
        const useFreshBox = this.shouldUpdateRegionsOfInterest();
        if (useFreshBox === true) {
-         const boundingBoxPrediction = await this.boundingBoxDetector.estimateHandBounds(image);
-         if (boundingBoxPrediction === null) {
-           image.dispose();
-           this.regionsOfInterest = [];
-           return null;
-         }
-         this.updateRegionsOfInterest(boundingBoxPrediction, true);
+         const boundingBoxPredictions = await this.boundingBoxDetector.estimateHandBounds(image);
+         this.regionsOfInterest = [];
+         for (const i in boundingBoxPredictions) {
+           this.updateRegionsOfInterest(boundingBoxPredictions[i], true, i);
+         }
          this.runsWithoutHandDetector = 0;
        } else {
          this.runsWithoutHandDetector++;
        }
-       const currentBox = this.regionsOfInterest[0];
-       const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
-       const palmCenter = bounding.getBoxCenter(currentBox);
-       const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];
-       const rotatedImage = tf2.image.rotateWithOffset(image, angle, 0, palmCenterNormalized);
-       const rotationMatrix = util.buildRotationMatrix(-angle, palmCenter);
-       const box = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
-       const croppedInput = bounding.cutBoxFromImageAndResize(box, rotatedImage, [this.meshWidth, this.meshHeight]);
-       const handImage = croppedInput.div(255);
-       croppedInput.dispose();
-       rotatedImage.dispose();
-       let prediction;
-       if (tf2.getBackend() === "webgl") {
-         const savedWebglPackDepthwiseConvFlag = tf2.env().get("WEBGL_PACK_DEPTHWISECONV");
-         tf2.env().set("WEBGL_PACK_DEPTHWISECONV", true);
-         prediction = this.meshDetector.predict(handImage);
-         tf2.env().set("WEBGL_PACK_DEPTHWISECONV", savedWebglPackDepthwiseConvFlag);
-       } else {
-         prediction = this.meshDetector.predict(handImage);
-       }
-       const [flag, keypoints] = prediction;
-       handImage.dispose();
-       const flagValue = flag.dataSync()[0];
-       flag.dispose();
-       if (flagValue < config.minConfidence) {
-         keypoints.dispose();
-         this.regionsOfInterest = [];
-         return null;
-       }
-       const keypointsReshaped = tf2.reshape(keypoints, [-1, 3]);
-       const rawCoords = keypointsReshaped.arraySync();
-       keypoints.dispose();
-       keypointsReshaped.dispose();
-       const coords = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
-       const nextBoundingBox = this.getBoxForHandLandmarks(coords);
-       this.updateRegionsOfInterest(nextBoundingBox, false);
-       const result = {
-         landmarks: coords,
-         confidence: flagValue,
-         box: {
-           topLeft: nextBoundingBox.startPoint,
-           bottomRight: nextBoundingBox.endPoint
-         }
-       };
-       return result;
+       const hands = [];
+       if (!this.regionsOfInterest)
+         return hands;
+       for (const i in this.regionsOfInterest) {
+         const currentBox = this.regionsOfInterest[i][0];
+         if (!currentBox)
+           return hands;
+         const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
+         const palmCenter = bounding.getBoxCenter(currentBox);
+         const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];
+         const rotatedImage = tf2.image.rotateWithOffset(image, angle, 0, palmCenterNormalized);
+         const rotationMatrix = util.buildRotationMatrix(-angle, palmCenter);
+         const box = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
+         const croppedInput = bounding.cutBoxFromImageAndResize(box, rotatedImage, [this.meshWidth, this.meshHeight]);
+         const handImage = croppedInput.div(255);
+         croppedInput.dispose();
+         rotatedImage.dispose();
+         const prediction = this.meshDetector.predict(handImage);
+         const [flag, keypoints] = prediction;
+         handImage.dispose();
+         const flagValue = flag.dataSync()[0];
+         flag.dispose();
+         if (flagValue < config.minConfidence) {
+           keypoints.dispose();
+           this.regionsOfInterest[i] = [];
+           return hands;
+         }
+         const keypointsReshaped = tf2.reshape(keypoints, [-1, 3]);
+         const rawCoords = await keypointsReshaped.array();
+         keypoints.dispose();
+         keypointsReshaped.dispose();
+         const coords = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
+         const nextBoundingBox = this.getBoxForHandLandmarks(coords);
+         this.updateRegionsOfInterest(nextBoundingBox, false, i);
+         const result = {
+           landmarks: coords,
+           confidence: flagValue,
+           box: {
+             topLeft: nextBoundingBox.startPoint,
+             bottomRight: nextBoundingBox.endPoint
+           }
+         };
+         hands.push(result);
+       }
+       return hands;
      }
      calculateLandmarksBoundingBox(landmarks) {
        const xs = landmarks.map((d) => d[0]);

@@ -5015,11 +4902,11 @@ var require_pipeline2 = __commonJS((exports2) => {
        const endPoint = [Math.max(...xs), Math.max(...ys)];
        return {startPoint, endPoint};
      }
-     updateRegionsOfInterest(box, forceUpdate) {
+     updateRegionsOfInterest(box, forceUpdate, index) {
        if (forceUpdate) {
-         this.regionsOfInterest = [box];
+         this.regionsOfInterest[index] = [box];
        } else {
-         const previousBox = this.regionsOfInterest[0];
+         const previousBox = this.regionsOfInterest[index][0];
          let iou = 0;
          if (previousBox != null && previousBox.startPoint != null) {
            const [boxStartX, boxStartY] = box.startPoint;

@@ -5035,21 +4922,20 @@ var require_pipeline2 = __commonJS((exports2) => {
          const previousBoxArea = (previousBoxEndX - previousBoxStartX) * (previousBoxEndY - boxStartY);
          iou = intersection / (boxArea + previousBoxArea - intersection);
        }
-       this.regionsOfInterest[0] = iou > UPDATE_REGION_OF_INTEREST_IOU_THRESHOLD ? previousBox : box;
+       this.regionsOfInterest[index][0] = iou > UPDATE_REGION_OF_INTEREST_IOU_THRESHOLD ? previousBox : box;
      }
    }
    shouldUpdateRegionsOfInterest() {
-     const roisCount = this.regionsOfInterest.length;
-     return roisCount !== this.maxHandsNumber || this.runsWithoutHandDetector >= this.maxContinuousChecks;
+     return this.regionsOfInterest === 0 || this.runsWithoutHandDetector >= this.maxContinuousChecks;
    }
  }
  exports2.HandPipeline = HandPipeline;
});

- // src/handpose/index.js
+ // src/handpose/handpose.js
  var require_handpose = __commonJS((exports2) => {
    const tf2 = require("@tensorflow/tfjs");
-   const hand = require_hand();
+   const hand = require_handdetector();
    const keypoints = require_keypoints3();
    const pipe = require_pipeline2();
    async function loadHandDetectorModel(url) {

@@ -5072,8 +4958,8 @@ var require_handpose = __commonJS((exports2) => {
      loadHandDetectorModel(config.detector.modelPath),
      loadHandPoseModel(config.skeleton.modelPath)
    ]);
-   const detector = new hand.HandDetector(handDetectorModel, config.inputSize, config.inputSize, ANCHORS, config.iouThreshold, config.scoreThreshold);
-   const pipeline = new pipe.HandPipeline(detector, handPoseModel, config.inputSize, config.inputSize, config.skipFrames, config.minConfidence);
+   const detector = new hand.HandDetector(handDetectorModel, config.inputSize, config.inputSize, ANCHORS, config.iouThreshold, config.scoreThreshold, config.maxHands);
+   const pipeline = new pipe.HandPipeline(detector, handPoseModel, config.inputSize, config.inputSize, config.skipFrames, config.minConfidence, config.maxHands);
    const handpose2 = new HandPose(pipeline);
    return handpose2;
  }

@@ -5089,20 +4975,26 @@ var require_handpose = __commonJS((exports2) => {
      }
      return input.toFloat().expandDims(0);
    });
-   const prediction = await this.pipeline.estimateHand(image, config);
+   const predictions = await this.pipeline.estimateHand(image, config);
    image.dispose();
-   if (!prediction)
-     return [];
-   const annotations = {};
-   for (const key of Object.keys(keypoints.MESH_ANNOTATIONS)) {
-     annotations[key] = keypoints.MESH_ANNOTATIONS[key].map((index) => prediction.landmarks[index]);
-   }
-   return [{
-     confidence: prediction.confidence || 0,
-     box: prediction.box ? [prediction.box.topLeft[0], prediction.box.topLeft[1], prediction.box.bottomRight[0] - prediction.box.topLeft[0], prediction.box.bottomRight[1] - prediction.box.topLeft[1]] : 0,
-     landmarks: prediction.landmarks,
-     annotations
-   }];
+   const hands = [];
+   if (!predictions)
+     return hands;
+   for (const prediction of predictions) {
+     if (!prediction)
+       return [];
+     const annotations = {};
+     for (const key of Object.keys(keypoints.MESH_ANNOTATIONS)) {
+       annotations[key] = keypoints.MESH_ANNOTATIONS[key].map((index) => prediction.landmarks[index]);
+     }
+     hands.push({
+       confidence: prediction.confidence || 0,
+       box: prediction.box ? [prediction.box.topLeft[0], prediction.box.topLeft[1], prediction.box.bottomRight[0] - prediction.box.topLeft[0], prediction.box.bottomRight[1] - prediction.box.topLeft[1]] : 0,
+       landmarks: prediction.landmarks,
+       annotations
+     });
+   }
+   return hands;
  }
}
exports2.HandPose = HandPose;

@@ -5120,10 +5012,10 @@ var require_config = __commonJS((exports2) => {
    modelPath: "../models/blazeface/model.json",
    inputSize: 128,
    maxFaces: 10,
-   skipFrames: 5,
-   minConfidence: 0.8,
+   skipFrames: 10,
+   minConfidence: 0.5,
    iouThreshold: 0.3,
-   scoreThreshold: 0.75
+   scoreThreshold: 0.5
  },
  mesh: {
    enabled: true,

@@ -5139,7 +5031,7 @@ var require_config = __commonJS((exports2) => {
    enabled: true,
    modelPath: "../models/ssrnet-age/imdb/model.json",
    inputSize: 64,
-   skipFrames: 5
+   skipFrames: 10
  },
  gender: {
    enabled: true,

@@ -5152,16 +5044,17 @@ var require_config = __commonJS((exports2) => {
    inputResolution: 257,
    outputStride: 16,
    maxDetections: 5,
-   scoreThreshold: 0.75,
+   scoreThreshold: 0.5,
    nmsRadius: 20
  },
  hand: {
    enabled: true,
    inputSize: 256,
-   skipFrames: 5,
-   minConfidence: 0.8,
+   skipFrames: 10,
+   minConfidence: 0.5,
    iouThreshold: 0.3,
-   scoreThreshold: 0.75,
+   scoreThreshold: 0.5,
+   maxHands: 2,
    detector: {
      anchors: "../models/handdetect/anchors.json",
      modelPath: "../models/handdetect/model.json"

@@ -5217,9 +5110,14 @@ async function detect(input, userConfig) {
  if (config.face.enabled && !models.facemesh)
    models.facemesh = await facemesh.load(config.face);
  tf.engine().startScope();
+ let savedWebglPackDepthwiseConvFlag;
+ if (tf.getBackend() === "webgl") {
+   savedWebglPackDepthwiseConvFlag = tf.env().get("WEBGL_PACK_DEPTHWISECONV");
+   tf.env().set("WEBGL_PACK_DEPTHWISECONV", true);
+ }
  let poseRes = [];
  if (config.body.enabled)
-   poseRes = await models.posenet.estimateMultiplePoses(input, config.body);
+   poseRes = await models.posenet.estimatePoses(input, config.body);
  let handRes = [];
  if (config.hand.enabled)
    handRes = await models.handpose.estimateHands(input, config.hand);

@@ -5241,6 +5139,7 @@ async function detect(input, userConfig) {
    });
  }
}
+ tf.env().set("WEBGL_PACK_DEPTHWISECONV", savedWebglPackDepthwiseConvFlag);
  tf.engine().endScope();
  resolve({face: faceRes, body: poseRes, hand: handRes});
});
File diff suppressed because one or more lines are too long
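
The commit's net effect on configuration is the new `hand.maxHands` default of `2` shown above. A hedged sketch of raising it per call (assuming the same per-call override mechanics the README describes for other options):

```js
// ask for up to four hands in a single frame; other hand options keep their defaults
const result = await human.detect(image, { hand: { maxHands: 4 } });
console.log(result.hand.length); // array of detected hands, up to maxHands entries
```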

@@ -98,6 +98,23 @@
      "lodash": "^4.17.19",
      "minimatch": "^3.0.4",
      "strip-json-comments": "^3.1.1"
    },
+   "dependencies": {
+     "debug": {
+       "version": "4.2.0",
+       "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz",
+       "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==",
+       "dev": true,
+       "requires": {
+         "ms": "2.1.2"
+       }
+     },
+     "strip-json-comments": {
+       "version": "3.1.1",
+       "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
+       "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
+       "dev": true
+     }
+   }
  },
  "@tensorflow/tfjs": {

@@ -179,6 +196,34 @@
    "integrity": "sha512-nU9WNSGpEU6GzKo5bvJBMa/OZRe1bR5Z2W6T0XiEY8CBiPNS+oJFJNm0NY8kQj/WnDS0Hfue38P46q7gV/9XMA==",
    "dev": true
  },
+ "@tensorflow/tfjs-node": {
+   "version": "2.6.0",
+   "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-node/-/tfjs-node-2.6.0.tgz",
+   "integrity": "sha512-Yp1PICAVD3jBhqEShlzZHC9uOtT8axpFeciw8TeI4KxnIydWQOnrisI09z6uR7shCwNY4TM6txhpkO5b/RDyvw==",
+   "dev": true,
+   "requires": {
+     "@tensorflow/tfjs": "2.6.0",
+     "@tensorflow/tfjs-core": "2.6.0",
+     "adm-zip": "^0.4.11",
+     "google-protobuf": "^3.9.2",
+     "https-proxy-agent": "^2.2.1",
+     "node-pre-gyp": "0.14.0",
+     "progress": "^2.0.0",
+     "rimraf": "^2.6.2",
+     "tar": "^4.4.6"
+   },
+   "dependencies": {
+     "rimraf": {
+       "version": "2.7.1",
+       "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
+       "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
+       "dev": true,
+       "requires": {
+         "glob": "^7.1.3"
+       }
+     }
+   }
+ },
  "@types/json5": {
    "version": "0.0.29",
    "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz",

@@ -225,6 +270,12 @@
    "integrity": "sha512-PACt1xdErJbMUOUweSrbVM7gSIYm1vTncW2hF6Os/EeWi6TXYAYMPp+8v6rzHmypE5gHrxaxZNXgMkJVIdZpHw==",
    "dev": true
  },
+ "abbrev": {
+   "version": "1.1.1",
+   "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
+   "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==",
+   "dev": true
+ },
  "acorn": {
    "version": "7.4.1",
    "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz",

@@ -237,6 +288,21 @@
    "integrity": "sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng==",
    "dev": true
  },
+ "adm-zip": {
+   "version": "0.4.16",
+   "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.4.16.tgz",
+   "integrity": "sha512-TFi4HBKSGfIKsK5YCkKaaFG2m4PEDyViZmEwof3MTIgzimHLto6muaHVpbrljdIvIrFZzEq/p4nafOeLcYegrg==",
+   "dev": true
+ },
+ "agent-base": {
+   "version": "4.3.0",
+   "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz",
+   "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==",
+   "dev": true,
+   "requires": {
+     "es6-promisify": "^5.0.0"
+   }
+ },
  "ajv": {
    "version": "6.12.6",
    "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",

@@ -270,6 +336,22 @@
      "color-convert": "^2.0.1"
    }
  },
+ "aproba": {
+   "version": "1.2.0",
+   "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz",
+   "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==",
+   "dev": true
+ },
+ "are-we-there-yet": {
+   "version": "1.1.5",
+   "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz",
+   "integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==",
+   "dev": true,
+   "requires": {
+     "delegates": "^1.0.0",
+     "readable-stream": "^2.0.6"
+   }
+ },
  "argparse": {
    "version": "1.0.10",
    "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",

@@ -386,6 +468,12 @@
      "supports-color": "^7.1.0"
    }
  },
+ "chownr": {
+   "version": "1.1.4",
+   "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
+   "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==",
+   "dev": true
+ },
  "cliui": {
    "version": "7.0.1",
    "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.1.tgz",

@@ -397,6 +485,12 @@
      "wrap-ansi": "^7.0.0"
    }
  },
+ "code-point-at": {
+   "version": "1.1.0",
+   "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz",
+   "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=",
+   "dev": true
+ },
  "color-convert": {
    "version": "2.0.1",
    "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",

@@ -433,6 +527,12 @@
    "integrity": "sha512-KbS1Y0jMtyPgIxjO7ZzMAuUpAKMt1SzCL9fsrKsX6b0zJPTaT0SiSPmewwVZg9UAO83HVIlEhZF84LIjZ0lmAw==",
    "dev": true
  },
+ "console-control-strings": {
+   "version": "1.1.0",
+   "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
+   "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=",
+   "dev": true
+ },
  "contains-path": {
    "version": "0.1.0",
    "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz",

@@ -445,6 +545,12 @@
    "integrity": "sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA==",
    "dev": true
  },
+ "core-util-is": {
+   "version": "1.0.2",
+   "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
+   "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=",
+   "dev": true
+ },
  "cross-spawn": {
    "version": "7.0.3",
    "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",

@@ -457,14 +563,20 @@
    }
  },
  "debug": {
-   "version": "4.2.0",
-   "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz",
-   "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==",
+   "version": "3.2.6",
+   "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz",
+   "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==",
    "dev": true,
    "requires": {
-     "ms": "2.1.2"
+     "ms": "^2.1.1"
    }
  },
+ "deep-extend": {
+   "version": "0.6.0",
+   "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz",
+   "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==",
+   "dev": true
+ },
  "deep-is": {
    "version": "0.1.3",
    "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz",

@@ -486,6 +598,18 @@
    "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=",
    "dev": true
  },
+ "delegates": {
+   "version": "1.0.0",
+   "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
+   "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=",
+   "dev": true
+ },
+ "detect-libc": {
+   "version": "1.0.3",
+   "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
+   "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=",
+   "dev": true
+ },
  "doctrine": {
    "version": "3.0.0",
    "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",

@@ -550,6 +674,21 @@
      "is-symbol": "^1.0.2"
    }
  },
+ "es6-promise": {
+   "version": "4.2.8",
+   "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz",
+   "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==",
+   "dev": true
+ },
+ "es6-promisify": {
+   "version": "5.0.0",
+   "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz",
+   "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=",
+   "dev": true,
+   "requires": {
+     "es6-promise": "^4.0.3"
+   }
+ },
  "esbuild": {
    "version": "0.7.15",
    "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.7.15.tgz",

@@ -611,6 +750,29 @@
      "table": "^5.2.3",
      "text-table": "^0.2.0",
      "v8-compile-cache": "^2.0.3"
    },
+   "dependencies": {
+     "debug": {
+       "version": "4.2.0",
+       "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz",
+       "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==",
+       "dev": true,
+       "requires": {
+         "ms": "2.1.2"
+       }
+     },
+     "semver": {
+       "version": "7.3.2",
+       "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz",
+       "integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==",
+       "dev": true
+     },
+     "strip-json-comments": {
+       "version": "3.1.1",
+       "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
+       "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
+       "dev": true
+     }
+   }
+ },
  "eslint-config-airbnb-base": {

@@ -959,6 +1121,15 @@
      "mime-types": "^2.1.12"
    }
  },
+ "fs-minipass": {
+   "version": "1.2.7",
+   "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz",
+   "integrity": "sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==",
+   "dev": true,
+   "requires": {
+     "minipass": "^2.6.0"
+   }
+ },
  "fs.realpath": {
    "version": "1.0.0",
    "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",

@@ -977,6 +1148,59 @@
    "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=",
    "dev": true
  },
+ "gauge": {
+   "version": "2.7.4",
+   "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz",
+   "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=",
+   "dev": true,
+   "requires": {
+     "aproba": "^1.0.3",
+     "console-control-strings": "^1.0.0",
+     "has-unicode": "^2.0.0",
+     "object-assign": "^4.1.0",
+     "signal-exit": "^3.0.0",
+     "string-width": "^1.0.1",
+     "strip-ansi": "^3.0.1",
+     "wide-align": "^1.1.0"
+   },
+   "dependencies": {
+     "ansi-regex": {
+       "version": "2.1.1",
+       "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
+       "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=",
+       "dev": true
+     },
+     "is-fullwidth-code-point": {
+       "version": "1.0.0",
+       "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz",
+       "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=",
+       "dev": true,
+       "requires": {
+         "number-is-nan": "^1.0.0"
+       }
+     },
+     "string-width": {
+       "version": "1.0.2",
+       "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz",
+       "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=",
+       "dev": true,
+       "requires": {
+         "code-point-at": "^1.0.0",
+         "is-fullwidth-code-point": "^1.0.0",
+         "strip-ansi": "^3.0.0"
+       }
+     },
+     "strip-ansi": {
+       "version": "3.0.1",
+       "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz",
+       "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=",
+       "dev": true,
+       "requires": {
+         "ansi-regex": "^2.0.0"
+       }
+     }
+   }
+ },
  "get-caller-file": {
    "version": "2.0.5",
    "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",

@@ -1015,6 +1239,12 @@
      "type-fest": "^0.8.1"
    }
  },
+ "google-protobuf": {
+   "version": "3.13.0",
+   "resolved": "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.13.0.tgz",
+   "integrity": "sha512-ZIf3qfLFayVrPvAjeKKxO5FRF1/NwRxt6Dko+fWEMuHwHbZx8/fcaAao9b0wCM6kr8qeg2te8XTpyuvKuD9aKw==",
+   "dev": true
+ },
  "graceful-fs": {
    "version": "4.2.4",
    "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.4.tgz",

@@ -1042,18 +1272,52 @@
    "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==",
    "dev": true
  },
+ "has-unicode": {
+   "version": "2.0.1",
+   "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
+   "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=",
+   "dev": true
+ },
  "hosted-git-info": {
    "version": "2.8.8",
    "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz",
    "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==",
    "dev": true
  },
+ "https-proxy-agent": {
+   "version": "2.2.4",
+   "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz",
+   "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==",
+   "dev": true,
+   "requires": {
+     "agent-base": "^4.3.0",
+     "debug": "^3.1.0"
+   }
+ },
+ "iconv-lite": {
+   "version": "0.4.24",
+   "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
+   "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
+   "dev": true,
+   "requires": {
+     "safer-buffer": ">= 2.1.2 < 3"
+   }
+ },
  "ignore": {
    "version": "4.0.6",
    "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz",
    "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==",
    "dev": true
  },
+ "ignore-walk": {
+   "version": "3.0.3",
+   "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.3.tgz",
+   "integrity": "sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==",
+   "dev": true,
+   "requires": {
+     "minimatch": "^3.0.4"
+   }
+ },
  "import-fresh": {
    "version": "3.2.1",
    "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz",

@@ -1086,6 +1350,12 @@
    "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
    "dev": true
  },
+ "ini": {
+   "version": "1.3.5",
+   "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz",
+   "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==",
+   "dev": true
+ },
  "is-arrayish": {
    "version": "0.2.1",
    "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",

@@ -1278,6 +1548,25 @@
    "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
    "dev": true
  },
+ "minipass": {
+   "version": "2.9.0",
+   "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz",
+   "integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==",
+   "dev": true,
+   "requires": {
+     "safe-buffer": "^5.1.2",
+     "yallist": "^3.0.0"
+   }
+ },
+ "minizlib": {
+   "version": "1.3.3",
+   "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.3.3.tgz",
+   "integrity": "sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==",
+   "dev": true,
+   "requires": {
+     "minipass": "^2.9.0"
+   }
+ },
  "mkdirp": {
    "version": "0.5.5",
    "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz",

@@ -1299,12 +1588,62 @@
    "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=",
    "dev": true
  },
+ "needle": {
+   "version": "2.5.2",
+   "resolved": "https://registry.npmjs.org/needle/-/needle-2.5.2.tgz",
+   "integrity": "sha512-LbRIwS9BfkPvNwNHlsA41Q29kL2L/6VaOJ0qisM5lLWsTV3nP15abO5ITL6L81zqFhzjRKDAYjpcBcwM0AVvLQ==",
+   "dev": true,
+   "requires": {
+     "debug": "^3.2.6",
+     "iconv-lite": "^0.4.4",
+     "sax": "^1.2.4"
+   }
+ },
+ "node-fetch": {
+   "version": "2.6.1",
+   "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz",
+   "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==",
+   "dev": true
+ },
+ "node-pre-gyp": {
+   "version": "0.14.0",
+   "resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.14.0.tgz",
+   "integrity": "sha512-+CvDC7ZttU/sSt9rFjix/P05iS43qHCOOGzcr3Ry99bXG7VX953+vFyEuph/tfqoYu8dttBkE86JSKBO2OzcxA==",
+   "dev": true,
+   "requires": {
+     "detect-libc": "^1.0.2",
+     "mkdirp": "^0.5.1",
+     "needle": "^2.2.1",
+     "nopt": "^4.0.1",
+     "npm-packlist": "^1.1.6",
+     "npmlog": "^4.0.2",
+     "rc": "^1.2.7",
+     "rimraf": "^2.6.1",
+     "semver": "^5.3.0",
+     "tar": "^4.4.2"
+   },
+   "dependencies": {
+     "rimraf": {
+       "version": "2.7.1",
+       "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
+       "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
+       "dev": true,
+       "requires": {
+         "glob": "^7.1.3"
+       }
+     }
+   }
+ },
+ "nopt": {
+   "version": "4.0.3",
+   "resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.3.tgz",
+   "integrity": "sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==",
+   "dev": true,
+   "requires": {
+     "abbrev": "1",
+     "osenv": "^0.1.4"
+   }
+ },
  "normalize-package-data": {
    "version": "2.5.0",
    "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz",

@@ -1315,16 +1654,58 @@
      "resolve": "^1.10.0",
      "semver": "2 || 3 || 4 || 5",
      "validate-npm-package-license": "^3.0.1"
    },
+   "dependencies": {
+     "semver": {
+       "version": "5.7.1",
+       "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
+       "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
+       "dev": true
+     }
+   }
  },
+ "npm-bundled": {
+   "version": "1.1.1",
+   "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.1.tgz",
+   "integrity": "sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==",
+   "dev": true,
+   "requires": {
+     "npm-normalize-package-bin": "^1.0.1"
+   }
+ },
+ "npm-normalize-package-bin": {
+   "version": "1.0.1",
+   "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz",
+   "integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==",
+   "dev": true
+ },
+ "npm-packlist": {
+   "version": "1.4.8",
+   "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.4.8.tgz",
+   "integrity": "sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A==",
+   "dev": true,
+   "requires": {
+     "ignore-walk": "^3.0.1",
+     "npm-bundled": "^1.0.1",
+     "npm-normalize-package-bin": "^1.0.1"
+   }
+ },
+ "npmlog": {
+   "version": "4.1.2",
+   "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz",
+   "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==",
+   "dev": true,
+   "requires": {
+     "are-we-there-yet": "~1.1.2",
+     "console-control-strings": "~1.1.0",
+     "gauge": "~2.7.3",
+     "set-blocking": "~2.0.0"
+   }
+ },
+ "number-is-nan": {
+   "version": "1.0.1",
+   "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz",
+   "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=",
+   "dev": true
+ },
+ "object-assign": {
+   "version": "4.1.1",
+   "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
+   "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=",
+   "dev": true
+ },
  "object-inspect": {
    "version": "1.8.0",
    "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.8.0.tgz",

@@ -1437,6 +1818,28 @@
      "word-wrap": "^1.2.3"
    }
  },
+ "os-homedir": {
+   "version": "1.0.2",
+   "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz",
+   "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=",
+   "dev": true
+ },
+ "os-tmpdir": {
+   "version": "1.0.2",
+   "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
+   "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=",
+   "dev": true
+ },
+ "osenv": {
+   "version": "0.1.5",
+   "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz",
+   "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==",
+   "dev": true,
+   "requires": {
+     "os-homedir": "^1.0.0",
+     "os-tmpdir": "^1.0.0"
+   }
+ },
  "p-limit": {
    "version": "1.3.0",
    "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz",

@@ -1533,6 +1936,12 @@
    "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
    "dev": true
  },
+ "process-nextick-args": {
+   "version": "2.0.1",
+   "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
+   "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==",
+   "dev": true
+ },
  "progress": {
    "version": "2.0.3",
    "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz",

@@ -1545,6 +1954,18 @@
    "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
    "dev": true
  },
+ "rc": {
+   "version": "1.2.8",
+   "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",
+   "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==",
+   "dev": true,
+   "requires": {
+     "deep-extend": "^0.6.0",
+     "ini": "~1.3.0",
+     "minimist": "^1.2.0",
+     "strip-json-comments": "~2.0.1"
+   }
+ },
  "read-pkg": {
    "version": "2.0.0",
    "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz",

@@ -1566,6 +1987,21 @@
      "read-pkg": "^2.0.0"
    }
  },
+ "readable-stream": {
+   "version": "2.3.7",
+   "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz",
+   "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==",
+   "dev": true,
+   "requires": {
+     "core-util-is": "~1.0.0",
+     "inherits": "~2.0.3",
+     "isarray": "~1.0.0",
+     "process-nextick-args": "~2.0.0",
+     "safe-buffer": "~5.1.1",
+     "string_decoder": "~1.1.1",
+     "util-deprecate": "~1.0.1"
|
||||
}
|
||||
},
|
||||
"regenerator-runtime": {
|
||||
"version": "0.13.7",
|
||||
"resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz",
|
||||
|
@ -1608,6 +2044,24 @@
|
|||
"glob": "^7.1.3"
|
||||
}
|
||||
},
|
||||
"safe-buffer": {
|
||||
"version": "5.1.2",
|
||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
|
||||
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==",
|
||||
"dev": true
|
||||
},
|
||||
"safer-buffer": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
|
||||
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
|
||||
"dev": true
|
||||
},
|
||||
"sax": {
|
||||
"version": "1.2.4",
|
||||
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
|
||||
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==",
|
||||
"dev": true
|
||||
},
|
||||
"seedrandom": {
|
||||
"version": "2.4.3",
|
||||
"resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-2.4.3.tgz",
|
||||
|
@ -1615,9 +2069,15 @@
|
|||
"dev": true
|
||||
},
|
||||
"semver": {
|
||||
"version": "7.3.2",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz",
|
||||
"integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==",
|
||||
"version": "5.7.1",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
|
||||
"integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
|
||||
"dev": true
|
||||
},
|
||||
"set-blocking": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
|
||||
"integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=",
|
||||
"dev": true
|
||||
},
|
||||
"shebang-command": {
|
||||
|
@ -1635,6 +2095,12 @@
|
|||
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
|
||||
"dev": true
|
||||
},
|
||||
"signal-exit": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz",
|
||||
"integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==",
|
||||
"dev": true
|
||||
},
|
||||
"slice-ansi": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz",
|
||||
|
@ -1789,6 +2255,15 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"string_decoder": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
|
||||
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"safe-buffer": "~5.1.0"
|
||||
}
|
||||
},
|
||||
"strip-ansi": {
|
||||
"version": "6.0.0",
|
||||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz",
|
||||
|
@ -1805,9 +2280,9 @@
|
|||
"dev": true
|
||||
},
|
||||
"strip-json-comments": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
|
||||
"integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
|
||||
"integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=",
|
||||
"dev": true
|
||||
},
|
||||
"supports-color": {
|
||||
|
@ -1871,6 +2346,21 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"tar": {
|
||||
"version": "4.4.13",
|
||||
"resolved": "https://registry.npmjs.org/tar/-/tar-4.4.13.tgz",
|
||||
"integrity": "sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"chownr": "^1.1.1",
|
||||
"fs-minipass": "^1.2.5",
|
||||
"minipass": "^2.8.6",
|
||||
"minizlib": "^1.2.1",
|
||||
"mkdirp": "^0.5.0",
|
||||
"safe-buffer": "^5.1.2",
|
||||
"yallist": "^3.0.3"
|
||||
}
|
||||
},
|
||||
"text-table": {
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
|
||||
|
@ -1913,6 +2403,12 @@
|
|||
"punycode": "^2.1.0"
|
||||
}
|
||||
},
|
||||
"util-deprecate": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
|
||||
"integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=",
|
||||
"dev": true
|
||||
},
|
||||
"v8-compile-cache": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.1.tgz",
|
||||
|
@ -1975,6 +2471,48 @@
|
|||
"isexe": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"wide-align": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz",
|
||||
"integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"string-width": "^1.0.2 || 2"
|
||||
},
|
||||
"dependencies": {
|
||||
"ansi-regex": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz",
|
||||
"integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=",
|
||||
"dev": true
|
||||
},
|
||||
"is-fullwidth-code-point": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz",
|
||||
"integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=",
|
||||
"dev": true
|
||||
},
|
||||
"string-width": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz",
|
||||
"integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-fullwidth-code-point": "^2.0.0",
|
||||
"strip-ansi": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"strip-ansi": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz",
|
||||
"integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"ansi-regex": "^3.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"word-wrap": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
|
||||
|
@ -2013,6 +2551,12 @@
|
|||
"integrity": "sha512-CkwaeZw6dQgqgPGeTWKMXCRmMcBgETFlTml1+ZOO+q7kGst8NREJ+eWwFNPVUQ4QGdAaklbqCZHH6Zuep1RjiA==",
|
||||
"dev": true
|
||||
},
|
||||
"yallist": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
|
||||
"integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
|
||||
"dev": true
|
||||
},
|
||||
"yargs": {
|
||||
"version": "16.0.3",
|
||||
"resolved": "https://registry.npmjs.org/yargs/-/yargs-16.0.3.tgz",
|
||||
|
|
package.json

@@ -5,7 +5,7 @@
"sideEffects": false,
"main": "dist/human.node.js",
"module": "dist/human.esm.js",
"browser": "dist/human.js",
"browser": "dist/human.esm.js",
"author": "Vladimir Mandic <mandic00@live.com>",
"bugs": {
"url": "https://github.com/vladmandic/human/issues"

@@ -20,11 +20,10 @@
"url": "git+https://github.com/vladmandic/human.git"
},
"dependencies": {},
"peerDependencies": {
"@tensorflow/tfjs-node": "^2.6.0"
},
"peerDependencies": {},
"devDependencies": {
"@tensorflow/tfjs": "^2.6.0",
"@tensorflow/tfjs-node": "^2.6.0",
"esbuild": "^0.7.15",
"eslint": "^7.10.0",
"eslint-config-airbnb-base": "^14.2.0",

@@ -37,9 +36,10 @@
"scripts": {
"start": "node --trace-warnings --trace-uncaught --no-deprecation demo/demo-node.js",
"lint": "eslint src/*.js demo/*.js",
"build": "rimraf dist/ && npm run build-esm && npm run build-iife && npm run build-node",
"build-esm": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:fs --outfile=dist/human.esm.js src/index.js",
"build": "rimraf dist/ && npm run build-iife && npm run build-esm && npm run build-nobundle && npm run build-node && ls -l dist/",
"build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --external:fs --global-name=human --outfile=dist/human.js src/index.js",
"build-esm": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --external:fs --outfile=dist/human.esm.js src/index.js",
"build-nobundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:@tensorflow --external:fs --outfile=dist/human.nobundle.js src/index.js",
"build-node": "esbuild --bundle --platform=node --sourcemap --target=esnext --format=cjs --external:@tensorflow --outfile=dist/human.node.js src/index.js",
"update": "npm update --depth 20 && npm dedupe && npm prune && npm audit"
},
src/config.js

@@ -5,10 +5,10 @@ export default {
    modelPath: '../models/blazeface/model.json',
    inputSize: 128, // fixed value
    maxFaces: 10, // maximum number of faces detected in the input, should be set to the minimum number for performance
    skipFrames: 5, // how many frames to go without running the bounding box detector, only relevant if maxFaces > 1
    minConfidence: 0.8, // threshold for discarding a prediction
    iouThreshold: 0.3, // threshold for deciding whether boxes overlap too much in non-maximum suppression, must be between [0, 1]
    scoreThreshold: 0.75, // threshold for deciding when to remove boxes based on score in non-maximum suppression
    skipFrames: 10, // how many frames to go without running the bounding box detector
    minConfidence: 0.5, // threshold for discarding a prediction
    iouThreshold: 0.3, // threshold for deciding whether boxes overlap too much in non-maximum suppression
    scoreThreshold: 0.7, // threshold for deciding when to remove boxes based on score in non-maximum suppression
  },
  mesh: {
    enabled: true,

@@ -24,7 +24,7 @@ export default {
    enabled: true,
    modelPath: '../models/ssrnet-age/imdb/model.json',
    inputSize: 64, // fixed value
    skipFrames: 5,
    skipFrames: 10,
  },
  gender: {
    enabled: true,

@@ -37,16 +37,17 @@ export default {
    inputResolution: 257, // fixed value
    outputStride: 16, // fixed value
    maxDetections: 5,
    scoreThreshold: 0.75,
    scoreThreshold: 0.7,
    nmsRadius: 20,
  },
  hand: {
    enabled: true,
    inputSize: 256, // fixed value
    skipFrames: 5,
    minConfidence: 0.8,
    skipFrames: 10,
    minConfidence: 0.5,
    iouThreshold: 0.3,
    scoreThreshold: 0.75,
    scoreThreshold: 0.7,
    maxHands: 2,
    detector: {
      anchors: '../models/handdetect/anchors.json',
      modelPath: '../models/handdetect/model.json',
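The relaxed defaults above (higher `skipFrames`, lower `minConfidence` and `scoreThreshold`, new `maxHands`) can be overridden per call, since `detect(input, userConfig)` merges user values over these defaults. A minimal sketch, assuming the browser bundle's global `human` object and a live `video` element (both illustrative here):

```js
// raise the hand limit beyond the default maxHands: 2 for this call only
const result = await human.detect(video, {
  hand: { enabled: true, maxHands: 4 },
});
console.log(`detected ${result.hand.length} hand(s)`);
```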
src/handpose/handdetector.js

@@ -2,12 +2,13 @@ const tf = require('@tensorflow/tfjs');
const bounding = require('./box');

class HandDetector {
  constructor(model, width, height, anchors, iouThreshold, scoreThreshold) {
  constructor(model, width, height, anchors, iouThreshold, scoreThreshold, maxHands) {
    this.model = model;
    this.width = width;
    this.height = height;
    this.iouThreshold = iouThreshold;
    this.scoreThreshold = scoreThreshold;
    this.maxHands = maxHands;
    this.anchors = anchors.map((anchor) => [anchor.x_center, anchor.y_center]);
    this.anchorsTensor = tf.tensor2d(this.anchors);
    this.inputSizeTensor = tf.tensor1d([width, height]);

@@ -35,28 +36,14 @@ class HandDetector {

  async getBoundingBoxes(input) {
    const normalizedInput = tf.tidy(() => tf.mul(tf.sub(input, 0.5), 2));
    let batchedPrediction;
    if (tf.getBackend() === 'webgl') {
      // Currently tfjs-core does not pack depthwiseConv because it fails for
      // very large inputs (https://github.com/tensorflow/tfjs/issues/1652).
      // TODO(annxingyuan): call tf.enablePackedDepthwiseConv when available
      // (https://github.com/tensorflow/tfjs/issues/2821)
      const savedWebglPackDepthwiseConvFlag = tf.env().get('WEBGL_PACK_DEPTHWISECONV');
      tf.env().set('WEBGL_PACK_DEPTHWISECONV', true);
      // The model returns a tensor with the following shape:
      // [1 (batch), 2944 (anchor points), 19 (data for each anchor)]
      batchedPrediction = this.model.predict(normalizedInput);
      tf.env().set('WEBGL_PACK_DEPTHWISECONV', savedWebglPackDepthwiseConvFlag);
    } else {
      batchedPrediction = this.model.predict(normalizedInput);
    }
    const batchedPrediction = this.model.predict(normalizedInput);
    const prediction = batchedPrediction.squeeze();
    // Regression score for each anchor point.
    const scores = tf.tidy(() => tf.sigmoid(tf.slice(prediction, [0, 0], [-1, 1])).squeeze());
    // Bounding box for each anchor point.
    const rawBoxes = tf.slice(prediction, [0, 1], [-1, 4]);
    const boxes = this.normalizeBoxes(rawBoxes);
    const boxesWithHandsTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, 1, this.iouThreshold, this.scoreThreshold);
    const boxesWithHandsTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.maxHands, this.iouThreshold, this.scoreThreshold);
    const boxesWithHands = await boxesWithHandsTensor.array();
    const toDispose = [
      normalizedInput, batchedPrediction, boxesWithHandsTensor, prediction,

@@ -66,15 +53,18 @@ class HandDetector {
      toDispose.forEach((tensor) => tensor.dispose());
      return null;
    }
    const boxIndex = boxesWithHands[0];
    const matchingBox = tf.slice(boxes, [boxIndex, 0], [1, -1]);
    const rawPalmLandmarks = tf.slice(prediction, [boxIndex, 5], [1, 14]);
    const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([
      -1, 2,
    ]));
    toDispose.push(rawPalmLandmarks);
    toDispose.forEach((tensor) => tensor.dispose());
    return { boxes: matchingBox, palmLandmarks };
    const detectedHands = tf.tidy(() => {
      const detectedBoxes = [];
      for (const i in boxesWithHands) {
        const boxIndex = boxesWithHands[i];
        const matchingBox = tf.slice(boxes, [boxIndex, 0], [1, -1]);
        const rawPalmLandmarks = tf.slice(prediction, [boxIndex, 5], [1, 14]);
        const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));
        detectedBoxes.push({ boxes: matchingBox, palmLandmarks });
      }
      return detectedBoxes;
    });
    return detectedHands;
  }

  /**

@@ -87,19 +77,21 @@ class HandDetector {
    const inputHeight = input.shape[1];
    const inputWidth = input.shape[2];
    const image = tf.tidy(() => input.resizeBilinear([this.width, this.height]).div(255));
    const prediction = await this.getBoundingBoxes(image);
    if (prediction === null) {
      image.dispose();
      return null;
    }
    const boundingBoxes = await prediction.boxes.array();
    const startPoint = boundingBoxes[0].slice(0, 2);
    const endPoint = boundingBoxes[0].slice(2, 4);
    const palmLandmarks = await prediction.palmLandmarks.array();
    const predictions = await this.getBoundingBoxes(image);
    image.dispose();
    prediction.boxes.dispose();
    prediction.palmLandmarks.dispose();
    return bounding.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks }, [inputWidth / this.width, inputHeight / this.height]);
    if (!predictions || (predictions.length === 0)) return null;
    const hands = [];
    for (const i in predictions) {
      const prediction = predictions[i];
      const boundingBoxes = await prediction.boxes.array();
      const startPoint = boundingBoxes[0].slice(0, 2);
      const endPoint = boundingBoxes[0].slice(2, 4);
      const palmLandmarks = await prediction.palmLandmarks.array();
      prediction.boxes.dispose();
      prediction.palmLandmarks.dispose();
      hands.push(bounding.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks }, [inputWidth / this.width, inputHeight / this.height]));
    }
    return hands;
  }
}
exports.HandDetector = HandDetector;
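The `maxHands` cap lands directly in non-maximum suppression: the third argument of `tf.image.nonMaxSuppressionAsync` is the maximum number of boxes kept, so replacing the hard-coded `1` with `this.maxHands` is what enables multiple palms per frame. A self-contained sketch of just that call, with toy boxes and scores standing in for the model output:

```js
const tf = require('@tensorflow/tfjs');

async function demoNms() {
  // three candidate boxes in [y1, x1, y2, x2] form; the first two overlap heavily
  const boxes = tf.tensor2d([[0, 0, 1, 1], [0.05, 0.05, 1, 1], [2, 2, 3, 3]]);
  const scores = tf.tensor1d([0.9, 0.8, 0.75]);
  const maxHands = 2; // plays the role of config.hand.maxHands
  const kept = await tf.image.nonMaxSuppressionAsync(boxes, scores, maxHands, 0.3, 0.7);
  console.log(await kept.array()); // indices of surviving boxes: [0, 2]
  tf.dispose([boxes, scores, kept]);
}
demoNms();
```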
src/handpose/handpose.js

@@ -1,5 +1,5 @@
const tf = require('@tensorflow/tfjs');
const hand = require('./hand');
const hand = require('./handdetector');
const keypoints = require('./keypoints');
const pipe = require('./pipeline');

@@ -47,8 +47,8 @@ async function load(config) {
    loadHandDetectorModel(config.detector.modelPath),
    loadHandPoseModel(config.skeleton.modelPath),
  ]);
  const detector = new hand.HandDetector(handDetectorModel, config.inputSize, config.inputSize, ANCHORS, config.iouThreshold, config.scoreThreshold);
  const pipeline = new pipe.HandPipeline(detector, handPoseModel, config.inputSize, config.inputSize, config.skipFrames, config.minConfidence);
  const detector = new hand.HandDetector(handDetectorModel, config.inputSize, config.inputSize, ANCHORS, config.iouThreshold, config.scoreThreshold, config.maxHands);
  const pipeline = new pipe.HandPipeline(detector, handPoseModel, config.inputSize, config.inputSize, config.skipFrames, config.minConfidence, config.maxHands);
  // eslint-disable-next-line no-use-before-define
  const handpose = new HandPose(pipeline);
  return handpose;

@@ -67,19 +67,24 @@ class HandPose {
      }
      return input.toFloat().expandDims(0);
    });
    const prediction = await this.pipeline.estimateHand(image, config);
    const predictions = await this.pipeline.estimateHand(image, config);
    image.dispose();
    if (!prediction) return [];
    const annotations = {};
    for (const key of Object.keys(keypoints.MESH_ANNOTATIONS)) {
      annotations[key] = keypoints.MESH_ANNOTATIONS[key].map((index) => prediction.landmarks[index]);
    const hands = [];
    if (!predictions) return hands;
    for (const prediction of predictions) {
      if (!prediction) continue;
      const annotations = {};
      for (const key of Object.keys(keypoints.MESH_ANNOTATIONS)) {
        annotations[key] = keypoints.MESH_ANNOTATIONS[key].map((index) => prediction.landmarks[index]);
      }
      hands.push({
        confidence: prediction.confidence || 0,
        box: prediction.box ? [prediction.box.topLeft[0], prediction.box.topLeft[1], prediction.box.bottomRight[0] - prediction.box.topLeft[0], prediction.box.bottomRight[1] - prediction.box.topLeft[1]] : 0,
        landmarks: prediction.landmarks,
        annotations,
      });
    }
    return [{
      confidence: prediction.confidence || 0,
      box: prediction.box ? [prediction.box.topLeft[0], prediction.box.topLeft[1], prediction.box.bottomRight[0] - prediction.box.topLeft[0], prediction.box.bottomRight[1] - prediction.box.topLeft[1]] : 0,
      landmarks: prediction.landmarks,
      annotations,
    }];
    return hands;
  }
}
exports.HandPose = HandPose;
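With this change the hand estimator returns an array, so callers iterate instead of destructuring a single result. A minimal consumer sketch over the returned shape `{ confidence, box, landmarks, annotations }`; the method name `estimateHands` is assumed here from the upstream handpose API, and `handpose`, `image` and `config` are assumed to come from `load()` and preprocessing as above:

```js
// each entry: confidence (0..1), box as [x, y, width, height], landmark points, named finger annotations
const hands = await handpose.estimateHands(image, config);
for (const hand of hands) {
  const [x, y, width, height] = hand.box;
  console.log(`hand ${Math.round(100 * hand.confidence)}% at ${x},${y} size ${width}x${height}`);
}
```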
src/handpose/pipeline.js

@@ -13,7 +13,7 @@ const PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;

// The Pipeline coordinates between the bounding box and skeleton models.
class HandPipeline {
  constructor(boundingBoxDetector, meshDetector, meshWidth, meshHeight, maxContinuousChecks, detectionConfidence) {
  constructor(boundingBoxDetector, meshDetector, meshWidth, meshHeight, maxContinuousChecks, detectionConfidence, maxHands) {
    // An array of hand bounding boxes.
    this.regionsOfInterest = [];
    this.runsWithoutHandDetector = 0;

@@ -21,6 +21,7 @@ class HandPipeline {
    this.meshDetector = meshDetector;
    this.maxContinuousChecks = maxContinuousChecks;
    this.detectionConfidence = detectionConfidence;
    this.maxHands = maxHands;
    this.meshWidth = meshWidth;
    this.meshHeight = meshHeight;
    this.maxHandsNumber = 1; // TODO(annxingyuan): Add multi-hand support.

@@ -82,72 +83,59 @@ class HandPipeline {
  async estimateHand(image, config) {
    const useFreshBox = this.shouldUpdateRegionsOfInterest();
    if (useFreshBox === true) {
      const boundingBoxPrediction = await this.boundingBoxDetector.estimateHandBounds(image);
      if (boundingBoxPrediction === null) {
        image.dispose();
        this.regionsOfInterest = [];
        return null;
      const boundingBoxPredictions = await this.boundingBoxDetector.estimateHandBounds(image);
      this.regionsOfInterest = [];
      for (const i in boundingBoxPredictions) {
        this.updateRegionsOfInterest(boundingBoxPredictions[i], true /* force update */, i);
      }
      this.updateRegionsOfInterest(boundingBoxPrediction, true /* force update */);
      this.runsWithoutHandDetector = 0;
    } else {
      this.runsWithoutHandDetector++;
    }
    // Rotate input so the hand is vertically oriented.
    const currentBox = this.regionsOfInterest[0];
    const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
    const palmCenter = bounding.getBoxCenter(currentBox);
    const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];
    const rotatedImage = tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized);
    const rotationMatrix = util.buildRotationMatrix(-angle, palmCenter);
    // The bounding box detector only detects palms, so if we're using a fresh
    // bounding box prediction, we have to construct the hand bounding box from
    // the palm keypoints.
    const box = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
    const croppedInput = bounding.cutBoxFromImageAndResize(box, rotatedImage, [this.meshWidth, this.meshHeight]);
    const handImage = croppedInput.div(255);
    croppedInput.dispose();
    rotatedImage.dispose();
    let prediction;
    if (tf.getBackend() === 'webgl') {
      // Currently tfjs-core does not pack depthwiseConv because it fails for
      // very large inputs (https://github.com/tensorflow/tfjs/issues/1652).
      // TODO(annxingyuan): call tf.enablePackedDepthwiseConv when available
      // (https://github.com/tensorflow/tfjs/issues/2821)
      const savedWebglPackDepthwiseConvFlag = tf.env().get('WEBGL_PACK_DEPTHWISECONV');
      tf.env().set('WEBGL_PACK_DEPTHWISECONV', true);
      prediction = this.meshDetector.predict(handImage);
      tf.env().set('WEBGL_PACK_DEPTHWISECONV', savedWebglPackDepthwiseConvFlag);
    } else {
      prediction = this.meshDetector.predict(handImage);
    }
    const [flag, keypoints] = prediction;
    handImage.dispose();
    const flagValue = flag.dataSync()[0];
    flag.dispose();
    if (flagValue < config.minConfidence) {
    const hands = [];
    if (!this.regionsOfInterest) return hands;
    for (const i in this.regionsOfInterest) {
      const currentBox = this.regionsOfInterest[i][0];
      if (!currentBox) return hands;
      const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
      const palmCenter = bounding.getBoxCenter(currentBox);
      const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];
      const rotatedImage = tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized);
      const rotationMatrix = util.buildRotationMatrix(-angle, palmCenter);
      const box = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
      const croppedInput = bounding.cutBoxFromImageAndResize(box, rotatedImage, [this.meshWidth, this.meshHeight]);
      const handImage = croppedInput.div(255);
      croppedInput.dispose();
      rotatedImage.dispose();
      const prediction = this.meshDetector.predict(handImage);
      const [flag, keypoints] = prediction;
      handImage.dispose();
      const flagValue = flag.dataSync()[0];
      flag.dispose();
      if (flagValue < config.minConfidence) {
        keypoints.dispose();
        this.regionsOfInterest[i] = [];
        return hands;
      }
      const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
      const rawCoords = await keypointsReshaped.array();
      keypoints.dispose();
      this.regionsOfInterest = [];
      return null;
      keypointsReshaped.dispose();
      const coords = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
      const nextBoundingBox = this.getBoxForHandLandmarks(coords);
      this.updateRegionsOfInterest(nextBoundingBox, false /* force replace */, i);
      const result = {
        landmarks: coords,
        confidence: flagValue,
        box: {
          topLeft: nextBoundingBox.startPoint,
          bottomRight: nextBoundingBox.endPoint,
        },
      };
      hands.push(result);
    }
    const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
    // Calling arraySync() because the tensor is very small so it's not worth
    // calling await array().
    const rawCoords = keypointsReshaped.arraySync();
    keypoints.dispose();
    keypointsReshaped.dispose();
    const coords = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
    const nextBoundingBox = this.getBoxForHandLandmarks(coords);
    this.updateRegionsOfInterest(nextBoundingBox, false /* force replace */);
    const result = {
      landmarks: coords,
      confidence: flagValue,
      box: {
        topLeft: nextBoundingBox.startPoint,
        bottomRight: nextBoundingBox.endPoint,
      },
    };
    return result;
    return hands;
  }

  // eslint-disable-next-line class-methods-use-this

@@ -161,11 +149,11 @@ class HandPipeline {

  // Updates regions of interest if the intersection over union between
  // the incoming and previous regions falls below a threshold.
  updateRegionsOfInterest(box, forceUpdate) {
  updateRegionsOfInterest(box, forceUpdate, index) {
    if (forceUpdate) {
      this.regionsOfInterest = [box];
      this.regionsOfInterest[index] = [box];
    } else {
      const previousBox = this.regionsOfInterest[0];
      const previousBox = this.regionsOfInterest[index][0];
      let iou = 0;
      if (previousBox != null && previousBox.startPoint != null) {
        const [boxStartX, boxStartY] = box.startPoint;

@@ -181,13 +169,12 @@ class HandPipeline {
        const previousBoxArea = (previousBoxEndX - previousBoxStartX) * (previousBoxEndY - previousBoxStartY);
        iou = intersection / (boxArea + previousBoxArea - intersection);
      }
      this.regionsOfInterest[0] = iou > UPDATE_REGION_OF_INTEREST_IOU_THRESHOLD ? previousBox : box;
      this.regionsOfInterest[index][0] = iou > UPDATE_REGION_OF_INTEREST_IOU_THRESHOLD ? previousBox : box;
    }
  }

  shouldUpdateRegionsOfInterest() {
    const roisCount = this.regionsOfInterest.length;
    return roisCount !== this.maxHandsNumber || this.runsWithoutHandDetector >= this.maxContinuousChecks;
    return (this.regionsOfInterest.length === 0) || (this.runsWithoutHandDetector >= this.maxContinuousChecks);
  }
}
exports.HandPipeline = HandPipeline;
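The per-region tracking above reduces to a plain intersection-over-union test: a tracked box is only kept while it still overlaps the fresh detection strongly enough. A standalone sketch of that computation (hypothetical helper, matching the `{ startPoint, endPoint }` box shape the pipeline uses):

```js
// IOU of two axis-aligned boxes given as { startPoint: [x1, y1], endPoint: [x2, y2] }
function iou(a, b) {
  const w = Math.max(0, Math.min(a.endPoint[0], b.endPoint[0]) - Math.max(a.startPoint[0], b.startPoint[0]));
  const h = Math.max(0, Math.min(a.endPoint[1], b.endPoint[1]) - Math.max(a.startPoint[1], b.startPoint[1]));
  const intersection = w * h;
  const areaA = (a.endPoint[0] - a.startPoint[0]) * (a.endPoint[1] - a.startPoint[1]);
  const areaB = (b.endPoint[0] - b.startPoint[0]) * (b.endPoint[1] - b.startPoint[1]);
  return intersection / (areaA + areaB - intersection);
}

// the pipeline keeps the previous region while overlap stays high:
// regionsOfInterest[index][0] = iou(previous, incoming) > UPDATE_REGION_OF_INTEREST_IOU_THRESHOLD ? previous : incoming;
```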
src/index.js

@@ -1,8 +1,8 @@
const tf = require('@tensorflow/tfjs');
const facemesh = require('./facemesh/index.js');
const ssrnet = require('./ssrnet/index.js');
const posenet = require('./posenet/index.js');
const handpose = require('./handpose/index.js');
const facemesh = require('./facemesh/facemesh.js');
const ssrnet = require('./ssrnet/ssrnet.js');
const posenet = require('./posenet/posenet.js');
const handpose = require('./handpose/handpose.js');
const defaults = require('./config.js').default;

const models = {

@@ -44,9 +44,15 @@ async function detect(input, userConfig) {

  tf.engine().startScope();

  let savedWebglPackDepthwiseConvFlag;
  if (tf.getBackend() === 'webgl') {
    savedWebglPackDepthwiseConvFlag = tf.env().get('WEBGL_PACK_DEPTHWISECONV');
    tf.env().set('WEBGL_PACK_DEPTHWISECONV', true);
  }

  // run posenet
  let poseRes = [];
  if (config.body.enabled) poseRes = await models.posenet.estimateMultiplePoses(input, config.body);
  if (config.body.enabled) poseRes = await models.posenet.estimatePoses(input, config.body);

  // run handpose
  let handRes = [];

@@ -76,6 +82,8 @@ async function detect(input, userConfig) {
    }
  }

  if (tf.getBackend() === 'webgl') tf.env().set('WEBGL_PACK_DEPTHWISECONV', savedWebglPackDepthwiseConvFlag);

  tf.engine().endScope();
  // combine results
  resolve({ face: faceRes, body: poseRes, hand: handRes });
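Hoisting the `WEBGL_PACK_DEPTHWISECONV` toggle here means it is flipped once around the whole `detect()` call instead of separately inside each model, and every enabled model contributes one array to the combined result. A minimal sketch of consuming that result (browser flavor; `human` and `video` are illustrative stand-ins):

```js
// face, body and hand are all arrays, so multiple hands appear as result.hand[0..n-1]
const result = await human.detect(video);
for (const hand of result.hand) console.log('hand:', hand.box, hand.confidence);
for (const body of result.body) console.log('body score:', body.score);
console.log('faces:', result.face.length);
```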
src/posenet/decodeSingle.js

@@ -1,59 +0,0 @@
const kpt = require('./keypoints');
const decoders = require('./decoders');
/**
 * Detects a single pose and finds its parts from part scores and offset
 * vectors. It returns a single pose detection. It works as follows:
 * argmax2d is done on the scores to get the y and x index in the heatmap
 * with the highest score for each part, which is essentially where the
 * part is most likely to exist. This produces a tensor of size 17x2, with
 * each row being the y and x index in the heatmap for each keypoint.
 * The offset vector for each part is retrieved by getting the
 * y and x from the offsets corresponding to the y and x index in the
 * heatmap for that part. This produces a tensor of size 17x2, with each
 * row being the offset vector for the corresponding keypoint.
 * To get the keypoint, each part's heatmap y and x are multiplied
 * by the output stride then added to their corresponding offset vector,
 * which is in the same scale as the original image.
 *
 * @param heatmapScores 3-D tensor with shape `[height, width, numParts]`.
 * The value of `heatmapScores[y, x, k]` is the score of placing the `k`-th
 * object part at position `(y, x)`.
 *
 * @param offsets 3-D tensor with shape `[height, width, numParts * 2]`.
 * The value of `[offsets[y, x, k], offsets[y, x, k + numParts]]` is the
 * short range offset vector of the `k`-th object part at heatmap
 * position `(y, x)`.
 *
 * @param outputStride The output stride that was used when feed-forwarding
 * through the PoseNet model. Must be 32, 16, or 8.
 *
 * @return A promise that resolves with single pose with a confidence score,
 * which contains an array of keypoints indexed by part id, each with a score
 * and position.
 */
async function decodeSinglePose(heatmapScores, offsets, outputStride) {
  let totalScore = 0.0;
  const heatmapValues = decoders.argmax2d(heatmapScores);
  const allTensorBuffers = await Promise.all([heatmapScores.buffer(), offsets.buffer(), heatmapValues.buffer()]);
  const scoresBuffer = allTensorBuffers[0];
  const offsetsBuffer = allTensorBuffers[1];
  const heatmapValuesBuffer = allTensorBuffers[2];
  const offsetPoints = decoders.getOffsetPoints(heatmapValuesBuffer, outputStride, offsetsBuffer);
  const offsetPointsBuffer = await offsetPoints.buffer();
  const keypointConfidence = Array.from(decoders.getPointsConfidence(scoresBuffer, heatmapValuesBuffer));
  const keypoints = keypointConfidence.map((score, keypointId) => {
    totalScore += score;
    return {
      position: {
        y: offsetPointsBuffer.get(keypointId, 0),
        x: offsetPointsBuffer.get(keypointId, 1),
      },
      part: kpt.partNames[keypointId],
      score,
    };
  });
  heatmapValues.dispose();
  offsetPoints.dispose();
  return { keypoints, score: totalScore / keypoints.length };
}
exports.decodeSinglePose = decodeSinglePose;
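The decode rule the removed file's comment spells out is compact: take each part's argmax cell in the heatmap, scale by the output stride, then add that part's offset vector. A tiny worked sketch of that single step (hypothetical helper with toy numbers):

```js
// keypoint position = heatmap argmax cell * outputStride + offset vector
function keypointPosition(cellY, cellX, outputStride, offsetY, offsetX) {
  return { y: cellY * outputStride + offsetY, x: cellX * outputStride + offsetX };
}

// e.g. cell (7, 4) with stride 16 and offsets (3.2, -1.5) lands at y=115.2, x=62.5
console.log(keypointPosition(7, 4, 16, 3.2, -1.5));
```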
src/posenet/modelPoseNet.js

@@ -1,7 +1,6 @@
const tf = require('@tensorflow/tfjs');
const modelMobileNet = require('./modelMobileNet');
const decodeMultiple = require('./decodeMultiple');
const decodeSingle = require('./decodeSingle');
const util = require('./util');

class PoseNet {

@@ -30,7 +29,7 @@ class PoseNet {
   * the corresponding keypoint scores. The positions of the keypoints are
   * in the same scale as the original image
   */
  async estimateMultiplePoses(input, config) {
  async estimatePoses(input, config) {
    const outputStride = this.baseModel.outputStride;
    const inputResolution = this.inputResolution;
    const [height, width] = util.getInputTensorDimensions(input);

@@ -51,41 +50,6 @@ class PoseNet {
    return resultPoses;
  }

  /**
   * Infer through PoseNet, and estimates a single pose using the outputs.
   * This does standard ImageNet pre-processing before inferring through the
   * model. The image pixels should have values [0-255]. It detects
   * multiple poses and finds their parts from part scores and displacement
   * vectors using a fast greedy decoding algorithm. It returns a single pose
   *
   * @param input
   * (ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement) The input
   * image to feed through the network.
   *
   * @param config SinglePersonEstimationConfig object that contains
   * parameters for the PoseNet inference using single pose estimation.
   *
   * @return A pose and its scores, containing keypoints and
   * the corresponding keypoint scores. The positions of the keypoints are
   * in the same scale as the original image
   */
  async estimateSinglePose(input) {
    const outputStride = this.baseModel.outputStride;
    const inputResolution = this.inputResolution;
    const [height, width] = util.getInputTensorDimensions(input);
    const { resized, padding } = util.padAndResizeTo(input, inputResolution);
    const { heatmapScores, offsets, displacementFwd, displacementBwd } = this.baseModel.predict(resized);
    const pose = await decodeSingle.decodeSinglePose(heatmapScores, offsets, outputStride);
    const poses = [pose];
    const resultPoses = util.scaleAndFlipPoses(poses, [height, width], [inputResolution, inputResolution], padding);
    heatmapScores.dispose();
    offsets.dispose();
    displacementFwd.dispose();
    displacementBwd.dispose();
    resized.dispose();
    return resultPoses[0];
  }

  dispose() {
    this.baseModel.dispose();
  }
src/posenet/posenet.js

@@ -1,7 +1,6 @@
const modelMobileNet = require('./modelMobileNet');
const modelPoseNet = require('./modelPoseNet');
const decodeMultiple = require('./decodeMultiple');
const decodeSingle = require('./decodeSingle');
const keypoints = require('./keypoints');
const util = require('./util');

@@ -10,7 +9,6 @@ exports.PoseNet = modelPoseNet.PoseNet;

exports.MobileNet = modelMobileNet.MobileNet;
exports.decodeMultiplePoses = decodeMultiple.decodeMultiplePoses;
exports.decodeSinglePose = decodeSingle.decodeSinglePose;
exports.partChannels = keypoints.partChannels;
exports.partIds = keypoints.partIds;
exports.partNames = keypoints.partNames;