mirror of https://github.com/vladmandic/human
major performance improvements for all models
parent bdb22d3f15
commit 7b15efd028
@@ -4,6 +4,9 @@
 export default {
 backend: 'webgl', // select tfjs backend to use
 console: true, // enable debugging output to console
+async: false, // execute enabled models in parallel
+// this disables per-model performance data but slightly increases performance
+// cannot be used if profiling is enabled
 profile: false, // enable tfjs profiling
 // this has significant performance impact, only enable for debugging purposes
 // currently only implemented for age,gender,emotion models
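The new `async` option is the headline change of this commit: when set, the body and hand models are dispatched without awaiting each one in turn (see the `detect` changes further down). A usage sketch follows; the import path, element id, and top-level `await` are assumptions for illustration, not part of the commit:

```js
// Usage sketch (assumed import path and element id).
import Human from '../dist/human.esm.js';

const human = new Human();
const video = document.getElementById('video');

const result = await human.detect(video, {
  async: true,    // run enabled models in parallel
  profile: false, // profiling cannot be combined with async mode
});
console.log(result.performance); // per-model timings are reduced in async mode
```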
@@ -90,14 +90,18 @@ const log = (...msg) => {
 
 // draws processed results and starts processing of a next frame
 function drawResults(input, result, canvas) {
-// update fps
+// update fps data
 fps.push(1000 / (performance.now() - timeStamp));
 if (fps.length > ui.maxFrames) fps.shift();
-menu.updateChart('FPS', fps);
+// enable for continuous performance monitoring
+// console.log(result.performance);
 
 // eslint-disable-next-line no-use-before-define
-requestAnimationFrame(() => runHumanDetect(input, canvas)); // immediate loop
+requestAnimationFrame(() => runHumanDetect(input, canvas)); // immediate loop before we even draw results
 
+// draw fps chart
+menu.updateChart('FPS', fps);
 // draw image from video
 const ctx = canvas.getContext('2d');
 ctx.fillStyle = ui.baseBackground;
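The demo change above is subtle but matters for throughput: the next detection frame is now scheduled before any drawing happens, so canvas rendering no longer sits on the inference critical path. A minimal sketch of the resulting loop; the function names mirror the demo, the surrounding wiring is assumed:

```js
// Hypothetical minimal loop showing the reordering (names mirror the demo).
async function runHumanDetect(input, canvas) {
  const result = await human.detect(input); // inference
  drawResults(input, result, canvas);       // hand off to the drawer
}

function drawResults(input, result, canvas) {
  // schedule the next detection *before* spending time on drawing
  requestAnimationFrame(() => runHumanDetect(input, canvas));
  // ...draw `result` onto `canvas` here...
}
```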
@@ -109,14 +109,14 @@ var require_blazeface = __commonJS((exports) => {
 return [prediction, decodedBounds, scoresOut];
 });
 const boxIndicesTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.maxFaces, this.iouThreshold, this.scoreThreshold);
-const boxIndices = await boxIndicesTensor.array();
+const boxIndices = boxIndicesTensor.arraySync();
 boxIndicesTensor.dispose();
 const boundingBoxesMap = boxIndices.map((boxIndex) => tf2.slice(boxes, [boxIndex, 0], [1, -1]));
-const boundingBoxes = await Promise.all(boundingBoxesMap.map(async (boundingBox) => {
-const vals = await boundingBox.array();
+const boundingBoxes = boundingBoxesMap.map((boundingBox) => {
+const vals = boundingBox.arraySync();
 boundingBox.dispose();
 return vals;
-}));
+});
 const annotatedBoxes = [];
 for (let i = 0; i < boundingBoxes.length; i++) {
 const boundingBox = boundingBoxes[i];
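A recurring pattern in this commit, here and in the ssrnet, emotion, handdetector, and pipeline hunks below: `await tensor.array()` / `await tensor.data()` become `arraySync()` / `dataSync()`. The synchronous variants block until the values are read back instead of scheduling a promise, which removes per-call microtask overhead for the many small reads in these loops, at the cost of blocking the thread. A standalone sketch of the tradeoff, assuming only `@tensorflow/tfjs`:

```js
import * as tf from '@tensorflow/tfjs';

async function compare() {
  const t = tf.tensor2d([[1, 2], [3, 4]]);

  // async read: yields to the event loop while values are fetched
  const viaPromise = await t.array();

  // sync read: blocks until done, no promise/microtask overhead;
  // this is what the commit switches to for small per-face tensors
  const viaSync = t.arraySync();

  t.dispose(); // disposal is required either way
  return [viaPromise, viaSync];
}
```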
@@ -143,9 +143,12 @@ var require_blazeface = __commonJS((exports) => {
 }
 async estimateFaces(input) {
 const {boxes, scaleFactor} = await this.getBoundingBoxes(input);
-return Promise.all(boxes.map(async (face) => {
+const faces = [];
+for (const face of boxes) {
+const landmarkData = face.landmarks.arraySync();
 const scaledBox = scaleBoxFromPrediction(face, scaleFactor);
-const [landmarkData, boxData, probabilityData] = await Promise.all([face.landmarks, scaledBox, face.probability].map(async (d) => d.array()));
+const boxData = scaledBox.arraySync();
+const probabilityData = face.probability.arraySync();
 const anchor = face.anchor;
 const [scaleFactorX, scaleFactorY] = scaleFactor;
 const scaledLandmarks = landmarkData.map((landmark) => [
@@ -162,8 +165,9 @@ var require_blazeface = __commonJS((exports) => {
 face.landmarks.dispose();
 face.probability.dispose();
 scaledBox.dispose();
-return normalizedFace;
-}));
+faces.push(normalizedFace);
+}
+return faces;
 }
 }
 async function load(config) {
@@ -3916,7 +3920,7 @@ var require_ssrnet = __commonJS((exports) => {
 let ageT;
 let genderT;
 const obj = {};
-if (!config.profile) {
+if (!config.profile || config.async) {
 if (config.face.age.enabled)
 promises.push(ageT = models.age.predict(enhance));
 if (config.face.gender.enabled)
@@ -3933,12 +3937,12 @@ var require_ssrnet = __commonJS((exports) => {
 profile2.run("gender", profileGender);
 }
 if (ageT) {
-const data = await ageT.data();
+const data = ageT.dataSync();
 obj.age = Math.trunc(10 * data[0]) / 10;
 tf2.dispose(ageT);
 }
 if (genderT) {
-const data = await genderT.data();
+const data = genderT.dataSync();
 const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
 if (confidence > config.face.gender.minConfidence) {
 obj.gender = data[0] <= 0.5 ? "female" : "male";
@@ -3993,11 +3997,11 @@ var require_emotion = __commonJS((exports) => {
 let data;
 if (!config.profile) {
 const emotionT = await models.emotion.predict(grayscale);
-data = await emotionT.data();
+data = emotionT.dataSync();
 tf2.dispose(emotionT);
 } else {
 const profileData = await tf2.profile(() => models.emotion.predict(grayscale));
-data = await profileData.result.data();
+data = profileData.result.dataSync();
 profileData.result.dispose();
 profile2.run("emotion", profileData);
 }
@@ -4580,10 +4584,7 @@ var require_box2 = __commonJS((exports) => {
 function scaleBoxCoordinates(box, factor) {
 const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];
 const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];
-const palmLandmarks = box.palmLandmarks.map((coord) => {
-const scaledCoord = [coord[0] * factor[0], coord[1] * factor[1]];
-return scaledCoord;
-});
+const palmLandmarks = box.palmLandmarks.map((coord) => [coord[0] * factor[0], coord[1] * factor[1]]);
 return {startPoint, endPoint, palmLandmarks};
 }
 exports.scaleBoxCoordinates = scaleBoxCoordinates;
@@ -4657,8 +4658,7 @@ var require_handdetector = __commonJS((exports) => {
 const rawBoxes = tf2.slice(prediction, [0, 1], [-1, 4]);
 const boxes = this.normalizeBoxes(rawBoxes);
 const boxesWithHandsTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.maxHands, this.iouThreshold, this.scoreThreshold);
-const boxesWithHands = await boxesWithHandsTensor.array();
-const toDispose = [batchedPrediction, boxesWithHandsTensor, prediction, boxes, rawBoxes, scores];
+const boxesWithHands = boxesWithHandsTensor.arraySync();
 const detectedHands = tf2.tidy(() => {
 const detectedBoxes = [];
 for (const i in boxesWithHands) {
@@ -4670,7 +4670,7 @@ var require_handdetector = __commonJS((exports) => {
 }
 return detectedBoxes;
 });
-toDispose.forEach((tensor) => tensor.dispose());
+[batchedPrediction, boxesWithHandsTensor, prediction, boxes, rawBoxes, scores].forEach((tensor) => tensor.dispose());
 return detectedHands;
 }
 async estimateHandBounds(input, config) {
@@ -4678,12 +4678,10 @@ var require_handdetector = __commonJS((exports) => {
 this.scoreThreshold = config.scoreThreshold;
 this.maxHands = config.maxHands;
 const resized = input.resizeBilinear([this.width, this.height]);
-const divided = resized.div(255);
-const normalized = divided.sub(0.5);
-const image = normalized.mul(2);
+const divided = resized.mul([1 / 127.5]);
+const image = divided.sub(0.5);
 resized.dispose();
 divided.dispose();
-normalized.dispose();
 const predictions = await this.getBoundingBoxes(image);
 image.dispose();
 if (!predictions || predictions.length === 0)
@@ -4691,10 +4689,10 @@ var require_handdetector = __commonJS((exports) => {
 const hands = [];
 for (const i in predictions) {
 const prediction = predictions[i];
-const boundingBoxes = await prediction.boxes.array();
-const startPoint = boundingBoxes[0].slice(0, 2);
-const endPoint = boundingBoxes[0].slice(2, 4);
-const palmLandmarks = await prediction.palmLandmarks.array();
+const boundingBoxes = prediction.boxes.dataSync();
+const startPoint = [boundingBoxes[0], boundingBoxes[1]];
+const endPoint = [boundingBoxes[2], boundingBoxes[3]];
+const palmLandmarks = prediction.palmLandmarks.arraySync();
 prediction.boxes.dispose();
 prediction.palmLandmarks.dispose();
 hands.push(bounding.scaleBoxCoordinates({startPoint, endPoint, palmLandmarks}, [input.shape[2] / this.width, input.shape[1] / this.height]));
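One detail worth flagging in the hunk above: the old and new pixel normalizations are not algebraically identical. The old chain maps x in [0, 255] to (x/255 - 0.5) * 2, i.e. [-1, 1]; the new `mul([1 / 127.5])` followed by `sub(0.5)` maps to [-0.5, 1.5]. An exact single-multiply equivalent of the old range would subtract 1, not 0.5. A worked check in plain JavaScript (not from the commit; whether the shifted range is intentional is not stated there):

```js
// Worked check of the two normalizations at the extremes.
for (const x of [0, 255]) {
  const oldNorm = (x / 255 - 0.5) * 2;   // 0 -> -1,   255 -> 1
  const newNorm = x * (1 / 127.5) - 0.5; // 0 -> -0.5, 255 -> 1.5
  const exact = x / 127.5 - 1;           // 0 -> -1,   255 -> 1 (matches oldNorm)
  console.log({ x, oldNorm, newNorm, exact });
}
```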
@@ -4802,11 +4800,11 @@ var require_pipeline2 = __commonJS((exports) => {
 const PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;
 const PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;
 class HandPipeline {
-constructor(boundingBoxDetector, meshDetector, config) {
+constructor(boundingBoxDetector, detector, config) {
 this.regionsOfInterest = [];
 this.runsWithoutHandDetector = 0;
 this.boundingBoxDetector = boundingBoxDetector;
-this.meshDetector = meshDetector;
+this.detector = detector;
 this.meshWidth = config.inputSize;
 this.meshHeight = config.inputSize;
 this.enlargeFactor = config.enlargeFactor;
@@ -4872,7 +4870,7 @@ var require_pipeline2 = __commonJS((exports) => {
 if (!this.regionsOfInterest)
 return hands;
 for (const i in this.regionsOfInterest) {
-const currentBox = this.regionsOfInterest[i][0];
+const currentBox = this.regionsOfInterest[i] ? this.regionsOfInterest[i][0] : null;
 if (!currentBox)
 return hands;
 const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
@@ -4885,18 +4883,18 @@ var require_pipeline2 = __commonJS((exports) => {
 const handImage = croppedInput.div(255);
 croppedInput.dispose();
 rotatedImage.dispose();
-const prediction = this.meshDetector.predict(handImage);
-const [flag, keypoints] = prediction;
+const prediction = this.detector.predict(handImage);
+const [confidence, keypoints] = prediction;
 handImage.dispose();
-const flagValue = flag.dataSync()[0];
-flag.dispose();
-if (flagValue < config.minConfidence) {
+const confidenceVal = confidence.dataSync()[0];
+confidence.dispose();
+if (confidenceVal < config.minConfidence) {
 keypoints.dispose();
 this.regionsOfInterest[i] = [];
 return hands;
 }
 const keypointsReshaped = tf2.reshape(keypoints, [-1, 3]);
-const rawCoords = await keypointsReshaped.array();
+const rawCoords = keypointsReshaped.arraySync();
 keypoints.dispose();
 keypointsReshaped.dispose();
 const coords = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
|
@ -4904,7 +4902,7 @@ var require_pipeline2 = __commonJS((exports) => {
|
||||||
this.updateRegionsOfInterest(nextBoundingBox, false, i);
|
this.updateRegionsOfInterest(nextBoundingBox, false, i);
|
||||||
const result = {
|
const result = {
|
||||||
landmarks: coords,
|
landmarks: coords,
|
||||||
confidence: flagValue,
|
confidence: confidenceVal,
|
||||||
box: {
|
box: {
|
||||||
topLeft: nextBoundingBox.startPoint,
|
topLeft: nextBoundingBox.startPoint,
|
||||||
bottomRight: nextBoundingBox.endPoint
|
bottomRight: nextBoundingBox.endPoint
|
||||||
|
@@ -5729,6 +5727,7 @@ var require_config = __commonJS((exports) => {
 var config_default = {
 backend: "webgl",
 console: true,
+async: false,
 profile: false,
 deallocate: false,
 scoped: false,
@@ -5947,10 +5946,12 @@ class Human {
 this.version = app.version;
 this.defaults = defaults;
 this.config = defaults;
-this.fx = tf.ENV.flags.IS_BROWSER && typeof document !== "undefined" ? new fxImage.Canvas() : null;
+this.fx = null;
 this.state = "idle";
 this.numTensors = 0;
 this.analyzeMemoryLeaks = false;
+this.inCanvas = null;
+this.outCanvas = null;
 this.models = {
 facemesh: null,
 posenet: null,
@@ -6027,12 +6028,14 @@ class Human {
 }
 }
 tfImage(input) {
-let filtered;
-const originalWidth = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
-const originalHeight = input.naturalHeight || input.videoHeight || input.height || input.shape && input.shape[2] > 0;
-let targetWidth = originalWidth;
-let targetHeight = originalHeight;
-if (this.fx && this.config.filter.enabled && !(input instanceof tf.Tensor)) {
+let tensor;
+if (input instanceof tf.Tensor) {
+tensor = tf.clone(input);
+} else {
+const originalWidth = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
+const originalHeight = input.naturalHeight || input.videoHeight || input.height || input.shape && input.shape[2] > 0;
+let targetWidth = originalWidth;
+let targetHeight = originalHeight;
 if (this.config.filter.width > 0)
 targetWidth = this.config.filter.width;
 else if (this.config.filter.height > 0)
@@ -6041,60 +6044,69 @@ class Human {
 targetHeight = this.config.filter.height;
 else if (this.config.filter.width > 0)
 targetHeight = originalHeight * (this.config.filter.width / originalWidth);
-const offscreenCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
-if (offscreenCanvas.width !== targetWidth)
-offscreenCanvas.width = targetWidth;
-if (offscreenCanvas.height !== targetHeight)
-offscreenCanvas.height = targetHeight;
-const ctx = offscreenCanvas.getContext("2d");
+if (!this.inCanvas || this.inCanvas.width !== originalWidth || this.inCanvas.height !== originalHeight) {
+this.inCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
+if (this.inCanvas.width !== targetWidth)
+this.inCanvas.width = targetWidth;
+if (this.inCanvas.height !== targetHeight)
+this.inCanvas.height = targetHeight;
+}
+const ctx = this.inCanvas.getContext("2d");
 if (input instanceof ImageData)
 ctx.putImageData(input, 0, 0);
 else
-ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, offscreenCanvas.width, offscreenCanvas.height);
-this.fx.reset();
-this.fx.addFilter("brightness", this.config.filter.brightness);
-if (this.config.filter.contrast !== 0)
-this.fx.addFilter("contrast", this.config.filter.contrast);
-if (this.config.filter.sharpness !== 0)
-this.fx.addFilter("sharpen", this.config.filter.sharpness);
-if (this.config.filter.blur !== 0)
-this.fx.addFilter("blur", this.config.filter.blur);
-if (this.config.filter.saturation !== 0)
-this.fx.addFilter("saturation", this.config.filter.saturation);
-if (this.config.filter.hue !== 0)
-this.fx.addFilter("hue", this.config.filter.hue);
-if (this.config.filter.negative)
-this.fx.addFilter("negative");
-if (this.config.filter.sepia)
-this.fx.addFilter("sepia");
-if (this.config.filter.vintage)
-this.fx.addFilter("brownie");
-if (this.config.filter.sepia)
-this.fx.addFilter("sepia");
-if (this.config.filter.kodachrome)
-this.fx.addFilter("kodachrome");
-if (this.config.filter.technicolor)
-this.fx.addFilter("technicolor");
-if (this.config.filter.polaroid)
-this.fx.addFilter("polaroid");
-if (this.config.filter.pixelate !== 0)
-this.fx.addFilter("pixelate", this.config.filter.pixelate);
-filtered = this.fx.apply(offscreenCanvas);
-}
-let tensor;
-if (input instanceof tf.Tensor) {
-tensor = tf.clone(input);
-} else {
-const canvas = filtered || input;
+ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, this.inCanvas.width, this.inCanvas.height);
+if (this.config.filter.enabled) {
+if (!this.outCanvas || this.inCanvas.width !== this.outCanvas.width || this.inCanvas.height !== this.outCanvas.height) {
+this.outCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(this.inCanvas.width, this.inCanvas.height) : document.createElement("canvas");
+if (this.outCanvas.width !== this.inCanvas.width)
+this.outCanvas.width = this.inCanvas.width;
+if (this.outCanvas.height !== this.inCanvas.height)
+this.outCanvas.height = this.inCanvas.height;
+}
+if (!this.fx)
+this.fx = tf.ENV.flags.IS_BROWSER && typeof document !== "undefined" ? new fxImage.Canvas({canvas: this.outCanvas}) : null;
+this.fx.reset();
+this.fx.addFilter("brightness", this.config.filter.brightness);
+if (this.config.filter.contrast !== 0)
+this.fx.addFilter("contrast", this.config.filter.contrast);
+if (this.config.filter.sharpness !== 0)
+this.fx.addFilter("sharpen", this.config.filter.sharpness);
+if (this.config.filter.blur !== 0)
+this.fx.addFilter("blur", this.config.filter.blur);
+if (this.config.filter.saturation !== 0)
+this.fx.addFilter("saturation", this.config.filter.saturation);
+if (this.config.filter.hue !== 0)
+this.fx.addFilter("hue", this.config.filter.hue);
+if (this.config.filter.negative)
+this.fx.addFilter("negative");
+if (this.config.filter.sepia)
+this.fx.addFilter("sepia");
+if (this.config.filter.vintage)
+this.fx.addFilter("brownie");
+if (this.config.filter.sepia)
+this.fx.addFilter("sepia");
+if (this.config.filter.kodachrome)
+this.fx.addFilter("kodachrome");
+if (this.config.filter.technicolor)
+this.fx.addFilter("technicolor");
+if (this.config.filter.polaroid)
+this.fx.addFilter("polaroid");
+if (this.config.filter.pixelate !== 0)
+this.fx.addFilter("pixelate", this.config.filter.pixelate);
+this.fx.apply(this.inCanvas);
+}
+if (!this.outCanvas)
+this.outCanvas = this.inCanvas;
 let pixels;
-if (this.config.backend === "webgl" || canvas instanceof ImageData) {
-pixels = tf.browser.fromPixels(canvas);
+if (this.config.backend === "webgl" || this.outCanvas instanceof ImageData) {
+pixels = tf.browser.fromPixels(this.outCanvas);
 } else {
 const tempCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
 tempCanvas.width = targetWidth;
 tempCanvas.height = targetHeight;
 const tempCtx = tempCanvas.getContext("2d");
-tempCtx.drawImage(canvas, 0, 0);
+tempCtx.drawImage(this.outCanvas, 0, 0);
 const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
 pixels = tf.browser.fromPixels(data);
 }
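The tfImage rewrite above replaces a fresh OffscreenCanvas allocation on every frame with two canvases cached on the instance (`inCanvas` for the raw frame, `outCanvas` for the filtered output), and creates the WebGL filter chain once against the cached output canvas. A condensed sketch of the caching idiom, simplified from the diff and not the full method:

```js
// Condensed sketch of the canvas-caching idiom (simplified, not the full method).
function getCachedCanvas(cache, width, height) {
  // reallocate only when missing or when the source size changed
  if (!cache.canvas || cache.canvas.width !== width || cache.canvas.height !== height) {
    cache.canvas = typeof OffscreenCanvas !== 'undefined'
      ? new OffscreenCanvas(width, height)
      : document.createElement('canvas');
    cache.canvas.width = width;
    cache.canvas.height = height;
  }
  return cache.canvas;
}
```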
@@ -6103,7 +6115,7 @@ class Human {
 pixels.dispose();
 casted.dispose();
 }
-return {tensor, canvas: this.config.filter.return ? filtered : null};
+return {tensor, canvas: this.config.filter.return ? this.outCanvas : null};
 }
 async detect(input, userConfig = {}) {
 this.state = "config";
@@ -6119,6 +6131,10 @@ class Human {
 return {error};
 }
 return new Promise(async (resolve) => {
+let poseRes;
+let handRes;
+let ssrRes;
+let emotionRes;
 const timeStart = now();
 timeStamp = now();
 await this.checkBackend();
@@ -6140,18 +6156,28 @@ class Human {
 const image = this.tfImage(input);
 perf.image = Math.trunc(now() - timeStamp);
 const imageTensor = image.tensor;
-this.state = "run:body";
-timeStamp = now();
-this.analyze("Start PoseNet");
-const poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
-this.analyze("End PoseNet:");
-perf.body = Math.trunc(now() - timeStamp);
-this.state = "run:hand";
-timeStamp = now();
-this.analyze("Start HandPose:");
-const handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
-this.analyze("End HandPose:");
-perf.hand = Math.trunc(now() - timeStamp);
+if (this.config.async) {
+poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
+} else {
+this.state = "run:body";
+timeStamp = now();
+this.analyze("Start PoseNet");
+poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
+this.analyze("End PoseNet:");
+perf.body = Math.trunc(now() - timeStamp);
+}
+if (this.config.async) {
+handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
+} else {
+this.state = "run:hand";
+timeStamp = now();
+this.analyze("Start HandPose:");
+handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
+this.analyze("End HandPose:");
+perf.hand = Math.trunc(now() - timeStamp);
+}
+if (this.config.async)
+[poseRes, handRes] = await Promise.all([poseRes, handRes]);
 const faceRes = [];
 if (this.config.face.enabled) {
 this.state = "run:face";
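This hunk is the core of the new `async` mode: when enabled, `estimatePoses` and `estimateHands` are started without `await`, and the pending promises are joined with a single `Promise.all`. A standalone sketch of the pattern; the model names follow the diff, but the function here is a placeholder, not the library's API:

```js
// Standalone sketch of the parallel-vs-serial dispatch (placeholder wrapper).
async function run(models, config, tensor) {
  let poseRes;
  let handRes;
  if (config.async) {
    // start both without awaiting, then await both together
    poseRes = models.posenet.estimatePoses(tensor, config.body);  // no await
    handRes = models.handpose.estimateHands(tensor, config.hand); // no await
    [poseRes, handRes] = await Promise.all([poseRes, handRes]);
  } else {
    // serial: each model runs alone, so per-model timings stay meaningful
    poseRes = await models.posenet.estimatePoses(tensor, config.body);
    handRes = await models.handpose.estimateHands(tensor, config.hand);
  }
  return { poseRes, handRes };
}
```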
@@ -6166,11 +6192,11 @@ class Human {
 }
 this.state = "run:agegender";
 timeStamp = now();
-const ssrData = this.config.face.age.enabled || this.config.face.gender.enabled ? await ssrnet.predict(face.image, this.config) : {};
+ssrRes = this.config.face.age.enabled || this.config.face.gender.enabled ? await ssrnet.predict(face.image, this.config) : {};
 perf.agegender = Math.trunc(now() - timeStamp);
 this.state = "run:emotion";
 timeStamp = now();
-const emotionData = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
+emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
 perf.emotion = Math.trunc(now() - timeStamp);
 face.image.dispose();
 const iris = face.annotations.leftEyeIris && face.annotations.rightEyeIris ? Math.max(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0], face.annotations.rightEyeIris[3][0] - face.annotations.rightEyeIris[1][0]) : 0;
@@ -6179,10 +6205,10 @@ class Human {
 box: face.box,
 mesh: face.mesh,
 annotations: face.annotations,
-age: ssrData.age,
-gender: ssrData.gender,
-agConfidence: ssrData.confidence,
-emotion: emotionData,
+age: ssrRes.age,
+gender: ssrRes.gender,
+agConfidence: ssrRes.confidence,
+emotion: emotionRes,
 iris: iris !== 0 ? Math.trunc(100 * 11.7 / iris) / 100 : 0
 });
 this.analyze("End FaceMesh:");
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 {
 "inputs": {
 "config.js": {
-"bytes": 7075,
+"bytes": 7322,
 "imports": []
 },
 "package.json": {
@@ -9,7 +9,7 @@
 "imports": []
 },
 "src/emotion/emotion.js": {
-"bytes": 1958,
+"bytes": 1954,
 "imports": [
 {
 "path": "src/profile.js"
@@ -17,7 +17,7 @@
 ]
 },
 "src/facemesh/blazeface.js": {
-"bytes": 7161,
+"bytes": 6991,
 "imports": []
 },
 "src/facemesh/box.js": {
@@ -75,11 +75,11 @@
 "imports": []
 },
 "src/handpose/box.js": {
-"bytes": 2627,
+"bytes": 2572,
 "imports": []
 },
 "src/handpose/handdetector.js": {
-"bytes": 4248,
+"bytes": 4077,
 "imports": [
 {
 "path": "src/handpose/box.js"
@@ -105,7 +105,7 @@
 "imports": []
 },
 "src/handpose/pipeline.js": {
-"bytes": 8169,
+"bytes": 8216,
 "imports": [
 {
 "path": "src/handpose/box.js"
@@ -120,7 +120,7 @@
 "imports": []
 },
 "src/human.js": {
-"bytes": 13768,
+"bytes": 15047,
 "imports": [
 {
 "path": "src/facemesh/facemesh.js"
@@ -263,7 +263,7 @@
 "imports": []
 },
 "src/ssrnet/ssrnet.js": {
-"bytes": 2115,
+"bytes": 2127,
 "imports": [
 {
 "path": "src/profile.js"
@@ -275,13 +275,13 @@
 "dist/human.esm-nobundle.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 254548
+"bytes": 256450
 },
 "dist/human.esm-nobundle.js": {
 "imports": [],
 "inputs": {
 "src/facemesh/blazeface.js": {
-"bytesInOutput": 7121
+"bytesInOutput": 7133
 },
 "src/facemesh/keypoints.js": {
 "bytesInOutput": 2768
@@ -308,10 +308,10 @@
 "bytesInOutput": 1092
 },
 "src/ssrnet/ssrnet.js": {
-"bytesInOutput": 2310
+"bytesInOutput": 2322
 },
 "src/emotion/emotion.js": {
-"bytesInOutput": 2044
+"bytesInOutput": 2040
 },
 "src/posenet/modelBase.js": {
 "bytesInOutput": 910
@@ -347,10 +347,10 @@
 "bytesInOutput": 903
 },
 "src/handpose/box.js": {
-"bytesInOutput": 2805
+"bytesInOutput": 2744
 },
 "src/handpose/handdetector.js": {
-"bytesInOutput": 4117
+"bytesInOutput": 4027
 },
 "src/handpose/keypoints.js": {
 "bytesInOutput": 263
@@ -359,7 +359,7 @@
 "bytesInOutput": 2663
 },
 "src/handpose/pipeline.js": {
-"bytesInOutput": 7604
+"bytesInOutput": 7651
 },
 "src/handpose/handpose.js": {
 "bytesInOutput": 2270
@@ -368,19 +368,19 @@
 "bytesInOutput": 20137
 },
 "config.js": {
-"bytesInOutput": 2273
+"bytesInOutput": 2291
 },
 "package.json": {
 "bytesInOutput": 3012
 },
 "src/human.js": {
-"bytesInOutput": 11904
+"bytesInOutput": 13241
 },
 "src/human.js": {
 "bytesInOutput": 0
 }
 },
-"bytes": 158147
+"bytes": 159418
 }
 }
 }
@@ -67158,14 +67158,14 @@ var require_blazeface = __commonJS((exports) => {
 return [prediction, decodedBounds, scoresOut];
 });
 const boxIndicesTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.maxFaces, this.iouThreshold, this.scoreThreshold);
-const boxIndices = await boxIndicesTensor.array();
+const boxIndices = boxIndicesTensor.arraySync();
 boxIndicesTensor.dispose();
 const boundingBoxesMap = boxIndices.map((boxIndex) => tf2.slice(boxes, [boxIndex, 0], [1, -1]));
-const boundingBoxes = await Promise.all(boundingBoxesMap.map(async (boundingBox) => {
-const vals = await boundingBox.array();
+const boundingBoxes = boundingBoxesMap.map((boundingBox) => {
+const vals = boundingBox.arraySync();
 boundingBox.dispose();
 return vals;
-}));
+});
 const annotatedBoxes = [];
 for (let i = 0; i < boundingBoxes.length; i++) {
 const boundingBox = boundingBoxes[i];
@@ -67192,9 +67192,12 @@ var require_blazeface = __commonJS((exports) => {
 }
 async estimateFaces(input) {
 const {boxes, scaleFactor} = await this.getBoundingBoxes(input);
-return Promise.all(boxes.map(async (face) => {
+const faces = [];
+for (const face of boxes) {
+const landmarkData = face.landmarks.arraySync();
 const scaledBox = scaleBoxFromPrediction(face, scaleFactor);
-const [landmarkData, boxData, probabilityData] = await Promise.all([face.landmarks, scaledBox, face.probability].map(async (d) => d.array()));
+const boxData = scaledBox.arraySync();
+const probabilityData = face.probability.arraySync();
 const anchor = face.anchor;
 const [scaleFactorX, scaleFactorY] = scaleFactor;
 const scaledLandmarks = landmarkData.map((landmark) => [
@@ -67211,8 +67214,9 @@ var require_blazeface = __commonJS((exports) => {
 face.landmarks.dispose();
 face.probability.dispose();
 scaledBox.dispose();
-return normalizedFace;
-}));
+faces.push(normalizedFace);
+}
+return faces;
 }
 }
 async function load(config) {
@@ -70965,7 +70969,7 @@ var require_ssrnet = __commonJS((exports) => {
 let ageT;
 let genderT;
 const obj = {};
-if (!config.profile) {
+if (!config.profile || config.async) {
 if (config.face.age.enabled)
 promises.push(ageT = models.age.predict(enhance));
 if (config.face.gender.enabled)
@@ -70982,12 +70986,12 @@ var require_ssrnet = __commonJS((exports) => {
 profile2.run("gender", profileGender);
 }
 if (ageT) {
-const data = await ageT.data();
+const data = ageT.dataSync();
 obj.age = Math.trunc(10 * data[0]) / 10;
 tf2.dispose(ageT);
 }
 if (genderT) {
-const data = await genderT.data();
+const data = genderT.dataSync();
 const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
 if (confidence > config.face.gender.minConfidence) {
 obj.gender = data[0] <= 0.5 ? "female" : "male";
@@ -71042,11 +71046,11 @@ var require_emotion = __commonJS((exports) => {
 let data;
 if (!config.profile) {
 const emotionT = await models.emotion.predict(grayscale);
-data = await emotionT.data();
+data = emotionT.dataSync();
 tf2.dispose(emotionT);
 } else {
 const profileData = await tf2.profile(() => models.emotion.predict(grayscale));
-data = await profileData.result.data();
+data = profileData.result.dataSync();
 profileData.result.dispose();
 profile2.run("emotion", profileData);
 }
@@ -71629,10 +71633,7 @@ var require_box2 = __commonJS((exports) => {
 function scaleBoxCoordinates(box, factor) {
 const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];
 const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];
-const palmLandmarks = box.palmLandmarks.map((coord) => {
-const scaledCoord = [coord[0] * factor[0], coord[1] * factor[1]];
-return scaledCoord;
-});
+const palmLandmarks = box.palmLandmarks.map((coord) => [coord[0] * factor[0], coord[1] * factor[1]]);
 return {startPoint, endPoint, palmLandmarks};
 }
 exports.scaleBoxCoordinates = scaleBoxCoordinates;
@@ -71706,8 +71707,7 @@ var require_handdetector = __commonJS((exports) => {
 const rawBoxes = tf2.slice(prediction, [0, 1], [-1, 4]);
 const boxes = this.normalizeBoxes(rawBoxes);
 const boxesWithHandsTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.maxHands, this.iouThreshold, this.scoreThreshold);
-const boxesWithHands = await boxesWithHandsTensor.array();
-const toDispose = [batchedPrediction, boxesWithHandsTensor, prediction, boxes, rawBoxes, scores];
+const boxesWithHands = boxesWithHandsTensor.arraySync();
 const detectedHands = tf2.tidy(() => {
 const detectedBoxes = [];
 for (const i in boxesWithHands) {
@@ -71719,7 +71719,7 @@ var require_handdetector = __commonJS((exports) => {
 }
 return detectedBoxes;
 });
-toDispose.forEach((tensor) => tensor.dispose());
+[batchedPrediction, boxesWithHandsTensor, prediction, boxes, rawBoxes, scores].forEach((tensor) => tensor.dispose());
 return detectedHands;
 }
 async estimateHandBounds(input, config) {
@@ -71727,12 +71727,10 @@ var require_handdetector = __commonJS((exports) => {
 this.scoreThreshold = config.scoreThreshold;
 this.maxHands = config.maxHands;
 const resized = input.resizeBilinear([this.width, this.height]);
-const divided = resized.div(255);
-const normalized = divided.sub(0.5);
-const image = normalized.mul(2);
+const divided = resized.mul([1 / 127.5]);
+const image = divided.sub(0.5);
 resized.dispose();
 divided.dispose();
-normalized.dispose();
 const predictions = await this.getBoundingBoxes(image);
 image.dispose();
 if (!predictions || predictions.length === 0)
@@ -71740,10 +71738,10 @@ var require_handdetector = __commonJS((exports) => {
 const hands = [];
 for (const i in predictions) {
 const prediction = predictions[i];
-const boundingBoxes = await prediction.boxes.array();
-const startPoint = boundingBoxes[0].slice(0, 2);
-const endPoint = boundingBoxes[0].slice(2, 4);
-const palmLandmarks = await prediction.palmLandmarks.array();
+const boundingBoxes = prediction.boxes.dataSync();
+const startPoint = [boundingBoxes[0], boundingBoxes[1]];
+const endPoint = [boundingBoxes[2], boundingBoxes[3]];
+const palmLandmarks = prediction.palmLandmarks.arraySync();
 prediction.boxes.dispose();
 prediction.palmLandmarks.dispose();
 hands.push(bounding.scaleBoxCoordinates({startPoint, endPoint, palmLandmarks}, [input.shape[2] / this.width, input.shape[1] / this.height]));
@@ -71851,11 +71849,11 @@ var require_pipeline2 = __commonJS((exports) => {
 const PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;
 const PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;
 class HandPipeline {
-constructor(boundingBoxDetector, meshDetector, config) {
+constructor(boundingBoxDetector, detector, config) {
 this.regionsOfInterest = [];
 this.runsWithoutHandDetector = 0;
 this.boundingBoxDetector = boundingBoxDetector;
-this.meshDetector = meshDetector;
+this.detector = detector;
 this.meshWidth = config.inputSize;
 this.meshHeight = config.inputSize;
 this.enlargeFactor = config.enlargeFactor;
@@ -71921,7 +71919,7 @@ var require_pipeline2 = __commonJS((exports) => {
 if (!this.regionsOfInterest)
 return hands;
 for (const i in this.regionsOfInterest) {
-const currentBox = this.regionsOfInterest[i][0];
+const currentBox = this.regionsOfInterest[i] ? this.regionsOfInterest[i][0] : null;
 if (!currentBox)
 return hands;
 const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
@@ -71934,18 +71932,18 @@ var require_pipeline2 = __commonJS((exports) => {
 const handImage = croppedInput.div(255);
 croppedInput.dispose();
 rotatedImage.dispose();
-const prediction = this.meshDetector.predict(handImage);
-const [flag, keypoints] = prediction;
+const prediction = this.detector.predict(handImage);
+const [confidence, keypoints] = prediction;
 handImage.dispose();
-const flagValue = flag.dataSync()[0];
-flag.dispose();
-if (flagValue < config.minConfidence) {
+const confidenceVal = confidence.dataSync()[0];
+confidence.dispose();
+if (confidenceVal < config.minConfidence) {
 keypoints.dispose();
 this.regionsOfInterest[i] = [];
 return hands;
 }
 const keypointsReshaped = tf2.reshape(keypoints, [-1, 3]);
-const rawCoords = await keypointsReshaped.array();
+const rawCoords = keypointsReshaped.arraySync();
 keypoints.dispose();
 keypointsReshaped.dispose();
 const coords = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
@@ -71953,7 +71951,7 @@ var require_pipeline2 = __commonJS((exports) => {
 this.updateRegionsOfInterest(nextBoundingBox, false, i);
 const result = {
 landmarks: coords,
-confidence: flagValue,
+confidence: confidenceVal,
 box: {
 topLeft: nextBoundingBox.startPoint,
 bottomRight: nextBoundingBox.endPoint
@@ -72778,6 +72776,7 @@ var require_config = __commonJS((exports) => {
 var config_default = {
 backend: "webgl",
 console: true,
+async: false,
 profile: false,
 deallocate: false,
 scoped: false,
@@ -72996,10 +72995,12 @@ class Human {
 this.version = app.version;
 this.defaults = defaults;
 this.config = defaults;
-this.fx = tf.ENV.flags.IS_BROWSER && typeof document !== "undefined" ? new fxImage.Canvas() : null;
+this.fx = null;
 this.state = "idle";
 this.numTensors = 0;
 this.analyzeMemoryLeaks = false;
+this.inCanvas = null;
+this.outCanvas = null;
 this.models = {
 facemesh: null,
 posenet: null,
@@ -73076,12 +73077,14 @@ class Human {
 }
 }
 tfImage(input) {
-let filtered;
-const originalWidth = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
-const originalHeight = input.naturalHeight || input.videoHeight || input.height || input.shape && input.shape[2] > 0;
-let targetWidth = originalWidth;
-let targetHeight = originalHeight;
-if (this.fx && this.config.filter.enabled && !(input instanceof tf.Tensor)) {
+let tensor;
+if (input instanceof tf.Tensor) {
+tensor = tf.clone(input);
+} else {
+const originalWidth = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
+const originalHeight = input.naturalHeight || input.videoHeight || input.height || input.shape && input.shape[2] > 0;
+let targetWidth = originalWidth;
+let targetHeight = originalHeight;
 if (this.config.filter.width > 0)
 targetWidth = this.config.filter.width;
 else if (this.config.filter.height > 0)
@ -73090,60 +73093,69 @@ class Human {
|
||||||
targetHeight = this.config.filter.height;
|
targetHeight = this.config.filter.height;
|
||||||
else if (this.config.filter.width > 0)
|
else if (this.config.filter.width > 0)
|
||||||
targetHeight = originalHeight * (this.config.filter.width / originalWidth);
|
targetHeight = originalHeight * (this.config.filter.width / originalWidth);
|
||||||
const offscreenCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
|
-if (offscreenCanvas.width !== targetWidth)
-offscreenCanvas.width = targetWidth;
-if (offscreenCanvas.height !== targetHeight)
-offscreenCanvas.height = targetHeight;
-const ctx = offscreenCanvas.getContext("2d");
+if (!this.inCanvas || this.inCanvas.width !== originalWidth || this.inCanvas.height !== originalHeight) {
+this.inCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
+if (this.inCanvas.width !== targetWidth)
+this.inCanvas.width = targetWidth;
+if (this.inCanvas.height !== targetHeight)
+this.inCanvas.height = targetHeight;
+}
+const ctx = this.inCanvas.getContext("2d");
 if (input instanceof ImageData)
 ctx.putImageData(input, 0, 0);
 else
-ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, offscreenCanvas.width, offscreenCanvas.height);
+ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, this.inCanvas.width, this.inCanvas.height);
-this.fx.reset();
-this.fx.addFilter("brightness", this.config.filter.brightness);
-if (this.config.filter.contrast !== 0)
-this.fx.addFilter("contrast", this.config.filter.contrast);
-if (this.config.filter.sharpness !== 0)
-this.fx.addFilter("sharpen", this.config.filter.sharpness);
-if (this.config.filter.blur !== 0)
-this.fx.addFilter("blur", this.config.filter.blur);
-if (this.config.filter.saturation !== 0)
-this.fx.addFilter("saturation", this.config.filter.saturation);
-if (this.config.filter.hue !== 0)
-this.fx.addFilter("hue", this.config.filter.hue);
-if (this.config.filter.negative)
-this.fx.addFilter("negative");
-if (this.config.filter.sepia)
-this.fx.addFilter("sepia");
-if (this.config.filter.vintage)
-this.fx.addFilter("brownie");
-if (this.config.filter.sepia)
-this.fx.addFilter("sepia");
-if (this.config.filter.kodachrome)
-this.fx.addFilter("kodachrome");
-if (this.config.filter.technicolor)
-this.fx.addFilter("technicolor");
-if (this.config.filter.polaroid)
-this.fx.addFilter("polaroid");
-if (this.config.filter.pixelate !== 0)
-this.fx.addFilter("pixelate", this.config.filter.pixelate);
-filtered = this.fx.apply(offscreenCanvas);
-}
-let tensor;
-if (input instanceof tf.Tensor) {
-tensor = tf.clone(input);
-} else {
-const canvas = filtered || input;
+if (this.config.filter.enabled) {
+if (!this.outCanvas || this.inCanvas.width !== this.outCanvas.width || this.inCanvas.height !== this.outCanvas.height) {
+this.outCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(this.inCanvas.width, this.inCanvas.height) : document.createElement("canvas");
+if (this.outCanvas.width !== this.inCanvas.width)
+this.outCanvas.width = this.inCanvas.width;
+if (this.outCanvas.height !== this.inCanvas.height)
+this.outCanvas.height = this.inCanvas.height;
+}
+if (!this.fx)
+this.fx = tf.ENV.flags.IS_BROWSER && typeof document !== "undefined" ? new fxImage.Canvas({canvas: this.outCanvas}) : null;
+this.fx.reset();
+this.fx.addFilter("brightness", this.config.filter.brightness);
+if (this.config.filter.contrast !== 0)
+this.fx.addFilter("contrast", this.config.filter.contrast);
+if (this.config.filter.sharpness !== 0)
+this.fx.addFilter("sharpen", this.config.filter.sharpness);
+if (this.config.filter.blur !== 0)
+this.fx.addFilter("blur", this.config.filter.blur);
+if (this.config.filter.saturation !== 0)
+this.fx.addFilter("saturation", this.config.filter.saturation);
+if (this.config.filter.hue !== 0)
+this.fx.addFilter("hue", this.config.filter.hue);
+if (this.config.filter.negative)
+this.fx.addFilter("negative");
+if (this.config.filter.sepia)
+this.fx.addFilter("sepia");
+if (this.config.filter.vintage)
+this.fx.addFilter("brownie");
+if (this.config.filter.sepia)
+this.fx.addFilter("sepia");
+if (this.config.filter.kodachrome)
+this.fx.addFilter("kodachrome");
+if (this.config.filter.technicolor)
+this.fx.addFilter("technicolor");
+if (this.config.filter.polaroid)
+this.fx.addFilter("polaroid");
+if (this.config.filter.pixelate !== 0)
+this.fx.addFilter("pixelate", this.config.filter.pixelate);
+this.fx.apply(this.inCanvas);
+}
+if (!this.outCanvas)
+this.outCanvas = this.inCanvas;
 let pixels;
-if (this.config.backend === "webgl" || canvas instanceof ImageData) {
-pixels = tf.browser.fromPixels(canvas);
+if (this.config.backend === "webgl" || this.outCanvas instanceof ImageData) {
+pixels = tf.browser.fromPixels(this.outCanvas);
 } else {
 const tempCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
 tempCanvas.width = targetWidth;
 tempCanvas.height = targetHeight;
 const tempCtx = tempCanvas.getContext("2d");
-tempCtx.drawImage(canvas, 0, 0);
+tempCtx.drawImage(this.outCanvas, 0, 0);
 const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
 pixels = tf.browser.fromPixels(data);
 }
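Note: the hunk above replaces a throwaway offscreenCanvas allocated on every frame with two persistent canvases (this.inCanvas for the raw frame, this.outCanvas for the filtered result) that are only re-created when the input dimensions change, and the fx filter chain is now bound to the reused output canvas. A minimal sketch of the reuse pattern, with cached/getCanvas as illustrative names rather than library API:
// illustrative helper, not part of the library
let cached = null;
function getCanvas(width, height) {
  // allocate only when missing or when the requested size changed
  if (!cached || cached.width !== width || cached.height !== height) {
    cached = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(width, height) : document.createElement('canvas');
    cached.width = width;
    cached.height = height;
  }
  return cached; // every later frame draws into the same object
}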
@@ -73152,7 +73164,7 @@ class Human {
 pixels.dispose();
 casted.dispose();
 }
-return {tensor, canvas: this.config.filter.return ? filtered : null};
+return {tensor, canvas: this.config.filter.return ? this.outCanvas : null};
 }
 async detect(input, userConfig = {}) {
 this.state = "config";
@@ -73168,6 +73180,10 @@ class Human {
 return {error};
 }
 return new Promise(async (resolve) => {
+let poseRes;
+let handRes;
+let ssrRes;
+let emotionRes;
 const timeStart = now();
 timeStamp = now();
 await this.checkBackend();
@@ -73189,18 +73205,28 @@ class Human {
 const image = this.tfImage(input);
 perf.image = Math.trunc(now() - timeStamp);
 const imageTensor = image.tensor;
-this.state = "run:body";
-timeStamp = now();
-this.analyze("Start PoseNet");
-const poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
-this.analyze("End PoseNet:");
-perf.body = Math.trunc(now() - timeStamp);
-this.state = "run:hand";
-timeStamp = now();
-this.analyze("Start HandPose:");
-const handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
-this.analyze("End HandPose:");
-perf.hand = Math.trunc(now() - timeStamp);
+if (this.config.async) {
+poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
+} else {
+this.state = "run:body";
+timeStamp = now();
+this.analyze("Start PoseNet");
+poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
+this.analyze("End PoseNet:");
+perf.body = Math.trunc(now() - timeStamp);
+}
+if (this.config.async) {
+handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
+} else {
+this.state = "run:hand";
+timeStamp = now();
+this.analyze("Start HandPose:");
+handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
+this.analyze("End HandPose:");
+perf.hand = Math.trunc(now() - timeStamp);
+}
+if (this.config.async)
+[poseRes, handRes] = await Promise.all([poseRes, handRes]);
 const faceRes = [];
 if (this.config.face.enabled) {
 this.state = "run:face";
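Note: with config.async set, detect() now kicks off posenet and handpose without awaiting either call, then resolves both with one Promise.all, so the two models overlap instead of running back to back; the sequential branch is kept because it is the only way to attribute time to perf.body and perf.hand individually. Reduced to a sketch, assuming two promise-returning model calls:
// sequential: total ≈ tBody + tHand, but each stage can be timed on its own
const poseSeq = await models.posenet.estimatePoses(imageTensor, config.body);
const handSeq = await models.handpose.estimateHands(imageTensor, config.hand);
// parallel: total ≈ max(tBody, tHand); per-model timings are no longer meaningful
const [poseRes, handRes] = await Promise.all([
  models.posenet.estimatePoses(imageTensor, config.body),
  models.handpose.estimateHands(imageTensor, config.hand),
]);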
@@ -73215,11 +73241,11 @@ class Human {
 }
 this.state = "run:agegender";
 timeStamp = now();
-const ssrData = this.config.face.age.enabled || this.config.face.gender.enabled ? await ssrnet.predict(face.image, this.config) : {};
+ssrRes = this.config.face.age.enabled || this.config.face.gender.enabled ? await ssrnet.predict(face.image, this.config) : {};
 perf.agegender = Math.trunc(now() - timeStamp);
 this.state = "run:emotion";
 timeStamp = now();
-const emotionData = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
+emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
 perf.emotion = Math.trunc(now() - timeStamp);
 face.image.dispose();
 const iris = face.annotations.leftEyeIris && face.annotations.rightEyeIris ? Math.max(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0], face.annotations.rightEyeIris[3][0] - face.annotations.rightEyeIris[1][0]) : 0;
@@ -73228,10 +73254,10 @@ class Human {
 box: face.box,
 mesh: face.mesh,
 annotations: face.annotations,
-age: ssrData.age,
-gender: ssrData.gender,
-agConfidence: ssrData.confidence,
-emotion: emotionData,
+age: ssrRes.age,
+gender: ssrRes.gender,
+agConfidence: ssrRes.confidence,
+emotion: emotionRes,
 iris: iris !== 0 ? Math.trunc(100 * 11.7 / iris) / 100 : 0
 });
 this.analyze("End FaceMesh:");
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 {
 "inputs": {
 "config.js": {
-"bytes": 7075,
+"bytes": 7322,
 "imports": []
 },
 "node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
@@ -153,7 +153,7 @@
 "imports": []
 },
 "src/emotion/emotion.js": {
-"bytes": 1958,
+"bytes": 1954,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -164,7 +164,7 @@
 ]
 },
 "src/facemesh/blazeface.js": {
-"bytes": 7161,
+"bytes": 6991,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -236,7 +236,7 @@
 "imports": []
 },
 "src/handpose/box.js": {
-"bytes": 2627,
+"bytes": 2572,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -244,7 +244,7 @@
 ]
 },
 "src/handpose/handdetector.js": {
-"bytes": 4248,
+"bytes": 4077,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -276,7 +276,7 @@
 "imports": []
 },
 "src/handpose/pipeline.js": {
-"bytes": 8169,
+"bytes": 8216,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -294,7 +294,7 @@
 "imports": []
 },
 "src/human.js": {
-"bytes": 13768,
+"bytes": 15047,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -450,7 +450,7 @@
 "imports": []
 },
 "src/ssrnet/ssrnet.js": {
-"bytes": 2115,
+"bytes": 2127,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -481,7 +481,7 @@
 "dist/human.esm.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 5127854
+"bytes": 5129756
 },
 "dist/human.esm.js": {
 "imports": [],
@@ -544,7 +544,7 @@
 "bytesInOutput": 3025
 },
 "src/facemesh/blazeface.js": {
-"bytesInOutput": 7111
+"bytesInOutput": 7123
 },
 "src/facemesh/keypoints.js": {
 "bytesInOutput": 2768
@@ -571,10 +571,10 @@
 "bytesInOutput": 1092
 },
 "src/ssrnet/ssrnet.js": {
-"bytesInOutput": 2300
+"bytesInOutput": 2312
 },
 "src/emotion/emotion.js": {
-"bytesInOutput": 2034
+"bytesInOutput": 2030
 },
 "src/posenet/modelBase.js": {
 "bytesInOutput": 900
@@ -610,10 +610,10 @@
 "bytesInOutput": 903
 },
 "src/handpose/box.js": {
-"bytesInOutput": 2795
+"bytesInOutput": 2734
 },
 "src/handpose/handdetector.js": {
-"bytesInOutput": 4107
+"bytesInOutput": 4017
 },
 "src/handpose/keypoints.js": {
 "bytesInOutput": 263
@@ -622,7 +622,7 @@
 "bytesInOutput": 2663
 },
 "src/handpose/pipeline.js": {
-"bytesInOutput": 7594
+"bytesInOutput": 7641
 },
 "src/handpose/handpose.js": {
 "bytesInOutput": 2260
@@ -631,19 +631,19 @@
 "bytesInOutput": 20137
 },
 "config.js": {
-"bytesInOutput": 2273
+"bytesInOutput": 2291
 },
 "package.json": {
 "bytesInOutput": 3012
 },
 "src/human.js": {
-"bytesInOutput": 11894
+"bytesInOutput": 13231
 },
 "src/human.js": {
 "bytesInOutput": 0
 }
 },
-"bytes": 2927113
+"bytes": 2928384
 }
 }
 }
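Note: the JSON diff above is the esbuild build metadata for dist/human.esm.js; the bytes values are source-file sizes and the bytesInOutput values are each file's footprint in the bundle, so the deltas quantify how the commit lands in the build. A hypothetical snippet for inspecting such a metafile, assuming the standard esbuild metafile shape and this repo's dist layout:
// hypothetical inspection script (Node), not part of the repo
const fs = require('fs');
const meta = JSON.parse(fs.readFileSync('dist/human.esm.json', 'utf8'));
// print the recorded size of every emitted output file
for (const [file, info] of Object.entries(meta.outputs || {})) console.log(file, info.bytes);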
@@ -67159,14 +67159,14 @@ var Human = (() => {
 return [prediction, decodedBounds, scoresOut];
 });
 const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.maxFaces, this.iouThreshold, this.scoreThreshold);
-const boxIndices = await boxIndicesTensor.array();
+const boxIndices = boxIndicesTensor.arraySync();
 boxIndicesTensor.dispose();
 const boundingBoxesMap = boxIndices.map((boxIndex) => tf.slice(boxes, [boxIndex, 0], [1, -1]));
-const boundingBoxes = await Promise.all(boundingBoxesMap.map(async (boundingBox) => {
-const vals = await boundingBox.array();
+const boundingBoxes = boundingBoxesMap.map((boundingBox) => {
+const vals = boundingBox.arraySync();
 boundingBox.dispose();
 return vals;
-}));
+});
 const annotatedBoxes = [];
 for (let i = 0; i < boundingBoxes.length; i++) {
 const boundingBox = boundingBoxes[i];
@@ -67193,9 +67193,12 @@ var Human = (() => {
 }
 async estimateFaces(input) {
 const {boxes, scaleFactor} = await this.getBoundingBoxes(input);
-return Promise.all(boxes.map(async (face) => {
+const faces = [];
+for (const face of boxes) {
+const landmarkData = face.landmarks.arraySync();
 const scaledBox = scaleBoxFromPrediction(face, scaleFactor);
-const [landmarkData, boxData, probabilityData] = await Promise.all([face.landmarks, scaledBox, face.probability].map(async (d) => d.array()));
+const boxData = scaleBox.arraySync();
+const probabilityData = face.probability.arraySync();
 const anchor = face.anchor;
 const [scaleFactorX, scaleFactorY] = scaleFactor;
 const scaledLandmarks = landmarkData.map((landmark) => [
@@ -67212,8 +67215,9 @@ var Human = (() => {
 face.landmarks.dispose();
 face.probability.dispose();
 scaledBox.dispose();
-return normalizedFace;
-}));
+faces.push(normalizedFace);
+}
+return faces;
 }
 }
 async function load(config) {
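Note: the blazeface hunks above are representative of the whole commit: async tensor reads (tensor.array() / tensor.data(), one promise round-trip per tensor) become synchronous reads (arraySync() / dataSync()), and the Promise.all wrappers over per-box reads become plain loops. For the many tiny tensors read here, skipping promise scheduling is a win, at the price of blocking until the values are downloaded. A minimal sketch of the two styles in tfjs:
const tf = require('@tensorflow/tfjs');
async function readBothWays() {
  const t = tf.tensor2d([[1, 2], [3, 4]]);
  const viaPromise = await t.array(); // async read: schedules the download and yields a promise
  const blocking = t.arraySync();     // sync read: blocks the caller until values are copied back
  t.dispose();
  return [viaPromise, blocking];
}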
@@ -70966,7 +70970,7 @@ var Human = (() => {
 let ageT;
 let genderT;
 const obj = {};
-if (!config.profile) {
+if (!config.profile || config.async) {
 if (config.face.age.enabled)
 promises.push(ageT = models.age.predict(enhance));
 if (config.face.gender.enabled)
@@ -70983,12 +70987,12 @@ var Human = (() => {
 profile.run("gender", profileGender);
 }
 if (ageT) {
-const data = await ageT.data();
+const data = ageT.dataSync();
 obj.age = Math.trunc(10 * data[0]) / 10;
 tf.dispose(ageT);
 }
 if (genderT) {
-const data = await genderT.data();
+const data = genderT.dataSync();
 const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
 if (confidence > config.face.gender.minConfidence) {
 obj.gender = data[0] <= 0.5 ? "female" : "male";
@@ -71043,11 +71047,11 @@ var Human = (() => {
 let data;
 if (!config.profile) {
 const emotionT = await models.emotion.predict(grayscale);
-data = await emotionT.data();
+data = emotionT.dataSync();
 tf.dispose(emotionT);
 } else {
 const profileData = await tf.profile(() => models.emotion.predict(grayscale));
-data = await profileData.result.data();
+data = profileData.result.dataSync();
 profileData.result.dispose();
 profile.run("emotion", profileData);
 }
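Note: the ssrnet guard widens from if (!config.profile) to if (!config.profile || config.async), so async mode always takes the promise-based age/gender path even if profiling was requested; that matches the config.js comment that async execution cannot be combined with per-model profiling. Illustrative configuration, field names as in config.js:
const userConfig = {
  async: true,    // run enabled models in parallel
  profile: false, // must stay off: tfjs profiling requires the sequential path
};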
@@ -71630,10 +71634,7 @@ var Human = (() => {
 function scaleBoxCoordinates(box, factor) {
 const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];
 const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];
-const palmLandmarks = box.palmLandmarks.map((coord) => {
-const scaledCoord = [coord[0] * factor[0], coord[1] * factor[1]];
-return scaledCoord;
-});
+const palmLandmarks = box.palmLandmarks.map((coord) => [coord[0] * factor[0], coord[1] * factor[1]]);
 return {startPoint, endPoint, palmLandmarks};
 }
 exports.scaleBoxCoordinates = scaleBoxCoordinates;
@@ -71707,8 +71708,7 @@ var Human = (() => {
 const rawBoxes = tf.slice(prediction, [0, 1], [-1, 4]);
 const boxes = this.normalizeBoxes(rawBoxes);
 const boxesWithHandsTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.maxHands, this.iouThreshold, this.scoreThreshold);
-const boxesWithHands = await boxesWithHandsTensor.array();
-const toDispose = [batchedPrediction, boxesWithHandsTensor, prediction, boxes, rawBoxes, scores];
+const boxesWithHands = boxesWithHandsTensor.arraySync();
 const detectedHands = tf.tidy(() => {
 const detectedBoxes = [];
 for (const i in boxesWithHands) {
@@ -71720,7 +71720,7 @@ var Human = (() => {
 }
 return detectedBoxes;
 });
-toDispose.forEach((tensor) => tensor.dispose());
+[batchedPrediction, boxesWithHandsTensor, prediction, boxes, rawBoxes, scores].forEach((tensor) => tensor.dispose());
 return detectedHands;
 }
 async estimateHandBounds(input, config) {
@@ -71728,12 +71728,10 @@ var Human = (() => {
 this.scoreThreshold = config.scoreThreshold;
 this.maxHands = config.maxHands;
 const resized = input.resizeBilinear([this.width, this.height]);
-const divided = resized.div(255);
-const normalized = divided.sub(0.5);
-const image = normalized.mul(2);
+const divided = resized.mul([1 / 127.5]);
+const image = divided.sub(0.5);
 resized.dispose();
 divided.dispose();
-normalized.dispose();
 const predictions = await this.getBoundingBoxes(image);
 image.dispose();
 if (!predictions || predictions.length === 0)
@@ -71741,10 +71739,10 @@ var Human = (() => {
 const hands = [];
 for (const i in predictions) {
 const prediction = predictions[i];
-const boundingBoxes = await prediction.boxes.array();
-const startPoint = boundingBoxes[0].slice(0, 2);
-const endPoint = boundingBoxes[0].slice(2, 4);
-const palmLandmarks = await prediction.palmLandmarks.array();
+const boundingBoxes = prediction.boxes.dataSync();
+const startPoint = [boundingBoxes[0], boundingBoxes[1]];
+const endPoint = [boundingBoxes[2], boundingBoxes[3]];
+const palmLandmarks = prediction.palmLandmarks.arraySync();
 prediction.boxes.dispose();
 prediction.palmLandmarks.dispose();
 hands.push(bounding.scaleBoxCoordinates({startPoint, endPoint, palmLandmarks}, [input.shape[2] / this.width, input.shape[1] / this.height]));
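Note: the estimateHandBounds normalization shrinks from three tensor ops with two intermediates to two ops with one, but the constants do not reproduce the old range: the old chain computes ((x / 255) - 0.5) * 2 = x / 127.5 - 1, mapping pixel values 0..255 to [-1, 1], while the new chain computes x / 127.5 - 0.5, mapping them to [-0.5, 1.5]. Whether the shifted range is intentional is not stated in the commit; a two-op form that exactly preserves the old range would be:
// assumption: this variant is shown for comparison only, it is not what the commit does
const image = resized.mul(1 / 127.5).sub(1); // 0 -> -1, 255 -> +1, same as div(255).sub(0.5).mul(2)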
@@ -71852,11 +71850,11 @@ var Human = (() => {
 const PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;
 const PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;
 class HandPipeline {
-constructor(boundingBoxDetector, meshDetector, config) {
+constructor(boundingBoxDetector, detector, config) {
 this.regionsOfInterest = [];
 this.runsWithoutHandDetector = 0;
 this.boundingBoxDetector = boundingBoxDetector;
-this.meshDetector = meshDetector;
+this.detector = detector;
 this.meshWidth = config.inputSize;
 this.meshHeight = config.inputSize;
 this.enlargeFactor = config.enlargeFactor;
@@ -71922,7 +71920,7 @@ var Human = (() => {
 if (!this.regionsOfInterest)
 return hands;
 for (const i in this.regionsOfInterest) {
-const currentBox = this.regionsOfInterest[i][0];
+const currentBox = this.regionsOfInterest[i] ? this.regionsOfInterest[i][0] : null;
 if (!currentBox)
 return hands;
 const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
@@ -71935,18 +71933,18 @@ var Human = (() => {
 const handImage = croppedInput.div(255);
 croppedInput.dispose();
 rotatedImage.dispose();
-const prediction = this.meshDetector.predict(handImage);
-const [flag, keypoints] = prediction;
+const prediction = this.detector.predict(handImage);
+const [confidence, keypoints] = prediction;
 handImage.dispose();
-const flagValue = flag.dataSync()[0];
-flag.dispose();
-if (flagValue < config.minConfidence) {
+const confidenceVal = confidence.dataSync()[0];
+confidence.dispose();
+if (confidenceVal < config.minConfidence) {
 keypoints.dispose();
 this.regionsOfInterest[i] = [];
 return hands;
 }
 const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
-const rawCoords = await keypointsReshaped.array();
+const rawCoords = keypointsReshaped.arraySync();
 keypoints.dispose();
 keypointsReshaped.dispose();
 const coords = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
@@ -71954,7 +71952,7 @@ var Human = (() => {
 this.updateRegionsOfInterest(nextBoundingBox, false, i);
 const result = {
 landmarks: coords,
-confidence: flagValue,
+confidence: confidenceVal,
 box: {
 topLeft: nextBoundingBox.startPoint,
 bottomRight: nextBoundingBox.endPoint
@@ -72779,6 +72777,7 @@ var Human = (() => {
 var config_default = {
 backend: "webgl",
 console: true,
+async: false,
 profile: false,
 deallocate: false,
 scoped: false,
@@ -73001,10 +73000,12 @@ var Human = (() => {
 this.version = app.version;
 this.defaults = defaults;
 this.config = defaults;
-this.fx = tf.ENV.flags.IS_BROWSER && typeof document !== "undefined" ? new fxImage.Canvas() : null;
+this.fx = null;
 this.state = "idle";
 this.numTensors = 0;
 this.analyzeMemoryLeaks = false;
+this.inCanvas = null;
+this.outCanvas = null;
 this.models = {
 facemesh: null,
 posenet: null,
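Note: the constructor now only records null placeholders for the fx filter chain and the two working canvases; tfImage() creates them on first use, and the fx canvas is bound to this.outCanvas, which does not exist yet at construction time. Instantiating Human therefore no longer touches the DOM or allocates canvases until a frame is actually processed. The lazy-init pattern, reduced to a sketch with an illustrative factory name:
class Sketch {
  constructor() {
    this.fx = null; // deferred until the first processed frame
  }
  process(canvas) {
    if (!this.fx) this.fx = makeFilterChain(canvas); // makeFilterChain is hypothetical
    return this.fx;
  }
}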
@@ -73081,12 +73082,14 @@ var Human = (() => {
 }
 }
 tfImage(input) {
-let filtered;
-const originalWidth = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
-const originalHeight = input.naturalHeight || input.videoHeight || input.height || input.shape && input.shape[2] > 0;
-let targetWidth = originalWidth;
-let targetHeight = originalHeight;
-if (this.fx && this.config.filter.enabled && !(input instanceof tf.Tensor)) {
+let tensor;
+if (input instanceof tf.Tensor) {
+tensor = tf.clone(input);
+} else {
+const originalWidth = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
+const originalHeight = input.naturalHeight || input.videoHeight || input.height || input.shape && input.shape[2] > 0;
+let targetWidth = originalWidth;
+let targetHeight = originalHeight;
 if (this.config.filter.width > 0)
 targetWidth = this.config.filter.width;
 else if (this.config.filter.height > 0)
@@ -73095,60 +73098,69 @@ var Human = (() => {
 targetHeight = this.config.filter.height;
 else if (this.config.filter.width > 0)
 targetHeight = originalHeight * (this.config.filter.width / originalWidth);
-const offscreenCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
-if (offscreenCanvas.width !== targetWidth)
-offscreenCanvas.width = targetWidth;
-if (offscreenCanvas.height !== targetHeight)
-offscreenCanvas.height = targetHeight;
-const ctx = offscreenCanvas.getContext("2d");
+if (!this.inCanvas || this.inCanvas.width !== originalWidth || this.inCanvas.height !== originalHeight) {
+this.inCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
+if (this.inCanvas.width !== targetWidth)
+this.inCanvas.width = targetWidth;
+if (this.inCanvas.height !== targetHeight)
+this.inCanvas.height = targetHeight;
+}
+const ctx = this.inCanvas.getContext("2d");
 if (input instanceof ImageData)
 ctx.putImageData(input, 0, 0);
 else
-ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, offscreenCanvas.width, offscreenCanvas.height);
+ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, this.inCanvas.width, this.inCanvas.height);
-this.fx.reset();
-this.fx.addFilter("brightness", this.config.filter.brightness);
-if (this.config.filter.contrast !== 0)
-this.fx.addFilter("contrast", this.config.filter.contrast);
-if (this.config.filter.sharpness !== 0)
-this.fx.addFilter("sharpen", this.config.filter.sharpness);
-if (this.config.filter.blur !== 0)
-this.fx.addFilter("blur", this.config.filter.blur);
-if (this.config.filter.saturation !== 0)
-this.fx.addFilter("saturation", this.config.filter.saturation);
-if (this.config.filter.hue !== 0)
-this.fx.addFilter("hue", this.config.filter.hue);
-if (this.config.filter.negative)
-this.fx.addFilter("negative");
-if (this.config.filter.sepia)
-this.fx.addFilter("sepia");
-if (this.config.filter.vintage)
-this.fx.addFilter("brownie");
-if (this.config.filter.sepia)
-this.fx.addFilter("sepia");
-if (this.config.filter.kodachrome)
-this.fx.addFilter("kodachrome");
-if (this.config.filter.technicolor)
-this.fx.addFilter("technicolor");
-if (this.config.filter.polaroid)
-this.fx.addFilter("polaroid");
-if (this.config.filter.pixelate !== 0)
-this.fx.addFilter("pixelate", this.config.filter.pixelate);
-filtered = this.fx.apply(offscreenCanvas);
-}
-let tensor;
-if (input instanceof tf.Tensor) {
-tensor = tf.clone(input);
-} else {
-const canvas = filtered || input;
+if (this.config.filter.enabled) {
+if (!this.outCanvas || this.inCanvas.width !== this.outCanvas.width || this.inCanvas.height !== this.outCanvas.height) {
+this.outCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(this.inCanvas.width, this.inCanvas.height) : document.createElement("canvas");
+if (this.outCanvas.width !== this.inCanvas.width)
+this.outCanvas.width = this.inCanvas.width;
+if (this.outCanvas.height !== this.inCanvas.height)
+this.outCanvas.height = this.inCanvas.height;
+}
+if (!this.fx)
+this.fx = tf.ENV.flags.IS_BROWSER && typeof document !== "undefined" ? new fxImage.Canvas({canvas: this.outCanvas}) : null;
+this.fx.reset();
+this.fx.addFilter("brightness", this.config.filter.brightness);
+if (this.config.filter.contrast !== 0)
+this.fx.addFilter("contrast", this.config.filter.contrast);
+if (this.config.filter.sharpness !== 0)
+this.fx.addFilter("sharpen", this.config.filter.sharpness);
+if (this.config.filter.blur !== 0)
+this.fx.addFilter("blur", this.config.filter.blur);
+if (this.config.filter.saturation !== 0)
+this.fx.addFilter("saturation", this.config.filter.saturation);
+if (this.config.filter.hue !== 0)
+this.fx.addFilter("hue", this.config.filter.hue);
+if (this.config.filter.negative)
+this.fx.addFilter("negative");
+if (this.config.filter.sepia)
+this.fx.addFilter("sepia");
+if (this.config.filter.vintage)
+this.fx.addFilter("brownie");
+if (this.config.filter.sepia)
+this.fx.addFilter("sepia");
+if (this.config.filter.kodachrome)
+this.fx.addFilter("kodachrome");
+if (this.config.filter.technicolor)
+this.fx.addFilter("technicolor");
+if (this.config.filter.polaroid)
+this.fx.addFilter("polaroid");
+if (this.config.filter.pixelate !== 0)
+this.fx.addFilter("pixelate", this.config.filter.pixelate);
+this.fx.apply(this.inCanvas);
+}
+if (!this.outCanvas)
+this.outCanvas = this.inCanvas;
 let pixels;
-if (this.config.backend === "webgl" || canvas instanceof ImageData) {
-pixels = tf.browser.fromPixels(canvas);
+if (this.config.backend === "webgl" || this.outCanvas instanceof ImageData) {
+pixels = tf.browser.fromPixels(this.outCanvas);
 } else {
 const tempCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
 tempCanvas.width = targetWidth;
 tempCanvas.height = targetHeight;
 const tempCtx = tempCanvas.getContext("2d");
-tempCtx.drawImage(canvas, 0, 0);
+tempCtx.drawImage(this.outCanvas, 0, 0);
 const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
 pixels = tf.browser.fromPixels(data);
 }
@@ -73157,7 +73169,7 @@ var Human = (() => {
 pixels.dispose();
 casted.dispose();
 }
-return {tensor, canvas: this.config.filter.return ? filtered : null};
+return {tensor, canvas: this.config.filter.return ? this.outCanvas : null};
 }
 async detect(input, userConfig = {}) {
 this.state = "config";
@@ -73173,6 +73185,10 @@ var Human = (() => {
 return {error};
 }
 return new Promise(async (resolve) => {
+let poseRes;
+let handRes;
+let ssrRes;
+let emotionRes;
 const timeStart = now();
 timeStamp = now();
 await this.checkBackend();
@@ -73194,18 +73210,28 @@ var Human = (() => {
 const image = this.tfImage(input);
 perf.image = Math.trunc(now() - timeStamp);
 const imageTensor = image.tensor;
-this.state = "run:body";
-timeStamp = now();
-this.analyze("Start PoseNet");
-const poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
-this.analyze("End PoseNet:");
-perf.body = Math.trunc(now() - timeStamp);
-this.state = "run:hand";
-timeStamp = now();
-this.analyze("Start HandPose:");
-const handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
-this.analyze("End HandPose:");
-perf.hand = Math.trunc(now() - timeStamp);
+if (this.config.async) {
+poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
+} else {
+this.state = "run:body";
+timeStamp = now();
+this.analyze("Start PoseNet");
+poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
+this.analyze("End PoseNet:");
+perf.body = Math.trunc(now() - timeStamp);
+}
+if (this.config.async) {
+handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
+} else {
+this.state = "run:hand";
+timeStamp = now();
+this.analyze("Start HandPose:");
+handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
+this.analyze("End HandPose:");
+perf.hand = Math.trunc(now() - timeStamp);
+}
+if (this.config.async)
+[poseRes, handRes] = await Promise.all([poseRes, handRes]);
 const faceRes = [];
 if (this.config.face.enabled) {
 this.state = "run:face";
@@ -73220,11 +73246,11 @@ var Human = (() => {
 }
 this.state = "run:agegender";
 timeStamp = now();
-const ssrData = this.config.face.age.enabled || this.config.face.gender.enabled ? await ssrnet.predict(face.image, this.config) : {};
+ssrRes = this.config.face.age.enabled || this.config.face.gender.enabled ? await ssrnet.predict(face.image, this.config) : {};
 perf.agegender = Math.trunc(now() - timeStamp);
 this.state = "run:emotion";
 timeStamp = now();
-const emotionData = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
+emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
 perf.emotion = Math.trunc(now() - timeStamp);
 face.image.dispose();
 const iris = face.annotations.leftEyeIris && face.annotations.rightEyeIris ? Math.max(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0], face.annotations.rightEyeIris[3][0] - face.annotations.rightEyeIris[1][0]) : 0;
@@ -73233,10 +73259,10 @@ var Human = (() => {
 box: face.box,
 mesh: face.mesh,
 annotations: face.annotations,
-age: ssrData.age,
-gender: ssrData.gender,
-agConfidence: ssrData.confidence,
-emotion: emotionData,
+age: ssrRes.age,
+gender: ssrRes.gender,
+agConfidence: ssrRes.confidence,
+emotion: emotionRes,
 iris: iris !== 0 ? Math.trunc(100 * 11.7 / iris) / 100 : 0
 });
 this.analyze("End FaceMesh:");
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 {
 "inputs": {
 "config.js": {
-"bytes": 7075,
+"bytes": 7322,
 "imports": []
 },
 "node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
@@ -153,7 +153,7 @@
 "imports": []
 },
 "src/emotion/emotion.js": {
-"bytes": 1958,
+"bytes": 1954,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -164,7 +164,7 @@
 ]
 },
 "src/facemesh/blazeface.js": {
-"bytes": 7161,
+"bytes": 6991,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -236,7 +236,7 @@
 "imports": []
 },
 "src/handpose/box.js": {
-"bytes": 2627,
+"bytes": 2572,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -244,7 +244,7 @@
 ]
 },
 "src/handpose/handdetector.js": {
-"bytes": 4248,
+"bytes": 4077,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -276,7 +276,7 @@
 "imports": []
 },
 "src/handpose/pipeline.js": {
-"bytes": 8169,
+"bytes": 8216,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -294,7 +294,7 @@
 "imports": []
 },
 "src/human.js": {
-"bytes": 13768,
+"bytes": 15047,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -450,7 +450,7 @@
 "imports": []
 },
 "src/ssrnet/ssrnet.js": {
-"bytes": 2115,
+"bytes": 2127,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -481,7 +481,7 @@
 "dist/human.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 5131716
+"bytes": 5133654
 },
 "dist/human.js": {
 "imports": [],
@@ -544,7 +544,7 @@
 "bytesInOutput": 3189
 },
 "src/facemesh/blazeface.js": {
-"bytesInOutput": 7400
+"bytesInOutput": 7420
 },
 "src/facemesh/keypoints.js": {
 "bytesInOutput": 2936
@@ -571,10 +571,10 @@
 "bytesInOutput": 1140
 },
 "src/ssrnet/ssrnet.js": {
-"bytesInOutput": 2420
+"bytesInOutput": 2432
 },
 "src/emotion/emotion.js": {
-"bytesInOutput": 2137
+"bytesInOutput": 2133
 },
 "src/posenet/modelBase.js": {
 "bytesInOutput": 954
@@ -610,10 +610,10 @@
 "bytesInOutput": 943
 },
 "src/handpose/box.js": {
-"bytesInOutput": 2931
+"bytesInOutput": 2864
 },
 "src/handpose/handdetector.js": {
-"bytesInOutput": 4247
+"bytesInOutput": 4151
 },
 "src/handpose/keypoints.js": {
 "bytesInOutput": 283
@@ -622,7 +622,7 @@
 "bytesInOutput": 2803
 },
 "src/handpose/pipeline.js": {
-"bytesInOutput": 7911
+"bytesInOutput": 7958
 },
 "src/handpose/handpose.js": {
 "bytesInOutput": 2365
@@ -631,16 +631,16 @@
 "bytesInOutput": 21557
 },
 "config.js": {
-"bytesInOutput": 2471
+"bytesInOutput": 2491
 },
 "package.json": {
 "bytesInOutput": 3144
 },
 "src/human.js": {
-"bytesInOutput": 13235
+"bytesInOutput": 14680
 }
 },
-"bytes": 3073187
+"bytes": 3074564
 }
 }
 }
@@ -109,14 +109,14 @@ var require_blazeface = __commonJS((exports2) => {
 return [prediction, decodedBounds, scoresOut];
 });
 const boxIndicesTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.maxFaces, this.iouThreshold, this.scoreThreshold);
-const boxIndices = await boxIndicesTensor.array();
+const boxIndices = boxIndicesTensor.arraySync();
 boxIndicesTensor.dispose();
 const boundingBoxesMap = boxIndices.map((boxIndex) => tf2.slice(boxes, [boxIndex, 0], [1, -1]));
-const boundingBoxes = await Promise.all(boundingBoxesMap.map(async (boundingBox) => {
-const vals = await boundingBox.array();
+const boundingBoxes = boundingBoxesMap.map((boundingBox) => {
+const vals = boundingBox.arraySync();
 boundingBox.dispose();
 return vals;
-}));
+});
 const annotatedBoxes = [];
 for (let i = 0; i < boundingBoxes.length; i++) {
 const boundingBox = boundingBoxes[i];
@@ -143,9 +143,12 @@ var require_blazeface = __commonJS((exports2) => {
 }
 async estimateFaces(input) {
 const {boxes, scaleFactor} = await this.getBoundingBoxes(input);
-return Promise.all(boxes.map(async (face) => {
+const faces = [];
+for (const face of boxes) {
+const landmarkData = face.landmarks.arraySync();
 const scaledBox = scaleBoxFromPrediction(face, scaleFactor);
-const [landmarkData, boxData, probabilityData] = await Promise.all([face.landmarks, scaledBox, face.probability].map(async (d) => d.array()));
+const boxData = scaleBox.arraySync();
+const probabilityData = face.probability.arraySync();
 const anchor = face.anchor;
 const [scaleFactorX, scaleFactorY] = scaleFactor;
 const scaledLandmarks = landmarkData.map((landmark) => [
@@ -162,8 +165,9 @@ var require_blazeface = __commonJS((exports2) => {
 face.landmarks.dispose();
 face.probability.dispose();
 scaledBox.dispose();
-return normalizedFace;
-}));
+faces.push(normalizedFace);
+}
+return faces;
 }
 }
 async function load(config) {
@@ -3916,7 +3920,7 @@ var require_ssrnet = __commonJS((exports2) => {
 let ageT;
 let genderT;
 const obj = {};
-if (!config.profile) {
+if (!config.profile || config.async) {
 if (config.face.age.enabled)
 promises.push(ageT = models.age.predict(enhance));
 if (config.face.gender.enabled)
@@ -3933,12 +3937,12 @@ var require_ssrnet = __commonJS((exports2) => {
 profile2.run("gender", profileGender);
 }
 if (ageT) {
-const data = await ageT.data();
+const data = ageT.dataSync();
 obj.age = Math.trunc(10 * data[0]) / 10;
 tf2.dispose(ageT);
 }
 if (genderT) {
-const data = await genderT.data();
+const data = genderT.dataSync();
 const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
 if (confidence > config.face.gender.minConfidence) {
 obj.gender = data[0] <= 0.5 ? "female" : "male";
@@ -3993,11 +3997,11 @@ var require_emotion = __commonJS((exports2) => {
 let data;
 if (!config.profile) {
 const emotionT = await models.emotion.predict(grayscale);
-data = await emotionT.data();
+data = emotionT.dataSync();
 tf2.dispose(emotionT);
 } else {
 const profileData = await tf2.profile(() => models.emotion.predict(grayscale));
-data = await profileData.result.data();
+data = profileData.result.dataSync();
 profileData.result.dispose();
 profile2.run("emotion", profileData);
 }
@ -4580,10 +4584,7 @@ var require_box2 = __commonJS((exports2) => {
|
||||||
function scaleBoxCoordinates(box, factor) {
|
function scaleBoxCoordinates(box, factor) {
|
||||||
const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];
|
const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];
|
||||||
const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];
|
const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];
|
||||||
const palmLandmarks = box.palmLandmarks.map((coord) => {
|
const palmLandmarks = box.palmLandmarks.map((coord) => [coord[0] * factor[0], coord[1] * factor[1]]);
|
||||||
const scaledCoord = [coord[0] * factor[0], coord[1] * factor[1]];
|
|
||||||
return scaledCoord;
|
|
||||||
});
|
|
||||||
return {startPoint, endPoint, palmLandmarks};
|
return {startPoint, endPoint, palmLandmarks};
|
||||||
}
|
}
|
||||||
exports2.scaleBoxCoordinates = scaleBoxCoordinates;
|
exports2.scaleBoxCoordinates = scaleBoxCoordinates;
|
||||||
|
@ -4657,8 +4658,7 @@ var require_handdetector = __commonJS((exports2) => {
|
||||||
const rawBoxes = tf2.slice(prediction, [0, 1], [-1, 4]);
|
const rawBoxes = tf2.slice(prediction, [0, 1], [-1, 4]);
|
||||||
const boxes = this.normalizeBoxes(rawBoxes);
|
const boxes = this.normalizeBoxes(rawBoxes);
|
||||||
const boxesWithHandsTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.maxHands, this.iouThreshold, this.scoreThreshold);
|
const boxesWithHandsTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.maxHands, this.iouThreshold, this.scoreThreshold);
|
||||||
const boxesWithHands = await boxesWithHandsTensor.array();
|
const boxesWithHands = boxesWithHandsTensor.arraySync();
|
||||||
const toDispose = [batchedPrediction, boxesWithHandsTensor, prediction, boxes, rawBoxes, scores];
|
|
||||||
const detectedHands = tf2.tidy(() => {
|
const detectedHands = tf2.tidy(() => {
|
||||||
const detectedBoxes = [];
|
const detectedBoxes = [];
|
||||||
for (const i in boxesWithHands) {
|
for (const i in boxesWithHands) {
|
||||||
|
@ -4670,7 +4670,7 @@ var require_handdetector = __commonJS((exports2) => {
|
||||||
}
|
}
|
||||||
return detectedBoxes;
|
return detectedBoxes;
|
||||||
});
|
});
|
||||||
toDispose.forEach((tensor) => tensor.dispose());
|
[batchedPrediction, boxesWithHandsTensor, prediction, boxes, rawBoxes, scores].forEach((tensor) => tensor.dispose());
|
||||||
return detectedHands;
|
return detectedHands;
|
||||||
}
|
}
|
||||||
async estimateHandBounds(input, config) {
|
async estimateHandBounds(input, config) {
|
||||||
|
@ -4678,12 +4678,10 @@ var require_handdetector = __commonJS((exports2) => {
|
||||||
this.scoreThreshold = config.scoreThreshold;
|
this.scoreThreshold = config.scoreThreshold;
|
||||||
this.maxHands = config.maxHands;
|
this.maxHands = config.maxHands;
|
||||||
const resized = input.resizeBilinear([this.width, this.height]);
|
const resized = input.resizeBilinear([this.width, this.height]);
|
||||||
const divided = resized.div(255);
|
const divided = resized.mul([1 / 127.5]);
|
||||||
const normalized = divided.sub(0.5);
|
const image = divided.sub(0.5);
|
||||||
const image = normalized.mul(2);
|
|
||||||
resized.dispose();
|
resized.dispose();
|
||||||
divided.dispose();
|
divided.dispose();
|
||||||
normalized.dispose();
|
|
||||||
const predictions = await this.getBoundingBoxes(image);
|
const predictions = await this.getBoundingBoxes(image);
|
||||||
image.dispose();
|
image.dispose();
|
||||||
if (!predictions || predictions.length === 0)
|
if (!predictions || predictions.length === 0)
|
@@ -4691,10 +4689,10 @@ var require_handdetector = __commonJS((exports2) => {
 const hands = [];
 for (const i in predictions) {
 const prediction = predictions[i];
-const boundingBoxes = await prediction.boxes.array();
-const startPoint = boundingBoxes[0].slice(0, 2);
-const endPoint = boundingBoxes[0].slice(2, 4);
-const palmLandmarks = await prediction.palmLandmarks.array();
+const boundingBoxes = prediction.boxes.dataSync();
+const startPoint = [boundingBoxes[0], boundingBoxes[1]];
+const endPoint = [boundingBoxes[2], boundingBoxes[3]];
+const palmLandmarks = prediction.palmLandmarks.arraySync();
 prediction.boxes.dispose();
 prediction.palmLandmarks.dispose();
 hands.push(bounding.scaleBoxCoordinates({startPoint, endPoint, palmLandmarks}, [input.shape[2] / this.width, input.shape[1] / this.height]));
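The indexing change above follows from the download method: `arraySync()` preserves tensor shape while `dataSync()` returns a flat TypedArray, so the `[1, 4]` box tensor is now read by flat offsets instead of `boundingBoxes[0].slice(...)`. A sketch, assuming @tensorflow/tfjs:

const tf = require('@tensorflow/tfjs');

const box = tf.tensor2d([[10, 20, 110, 220]]); // shape [1, 4]
const flat = box.dataSync();                   // Float32Array [10, 20, 110, 220]
const startPoint = [flat[0], flat[1]];         // [10, 20]
const endPoint = [flat[2], flat[3]];           // [110, 220]
console.log(startPoint, endPoint, box.arraySync()[0].slice(0, 2)); // old-style access
box.dispose();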
@@ -4802,11 +4800,11 @@ var require_pipeline2 = __commonJS((exports2) => {
 const PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;
 const PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;
 class HandPipeline {
-constructor(boundingBoxDetector, meshDetector, config) {
+constructor(boundingBoxDetector, detector, config) {
 this.regionsOfInterest = [];
 this.runsWithoutHandDetector = 0;
 this.boundingBoxDetector = boundingBoxDetector;
-this.meshDetector = meshDetector;
+this.detector = detector;
 this.meshWidth = config.inputSize;
 this.meshHeight = config.inputSize;
 this.enlargeFactor = config.enlargeFactor;
@@ -4872,7 +4870,7 @@ var require_pipeline2 = __commonJS((exports2) => {
 if (!this.regionsOfInterest)
 return hands;
 for (const i in this.regionsOfInterest) {
-const currentBox = this.regionsOfInterest[i][0];
+const currentBox = this.regionsOfInterest[i] ? this.regionsOfInterest[i][0] : null;
 if (!currentBox)
 return hands;
 const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
@@ -4885,18 +4883,18 @@ var require_pipeline2 = __commonJS((exports2) => {
 const handImage = croppedInput.div(255);
 croppedInput.dispose();
 rotatedImage.dispose();
-const prediction = this.meshDetector.predict(handImage);
-const [flag, keypoints] = prediction;
+const prediction = this.detector.predict(handImage);
+const [confidence, keypoints] = prediction;
 handImage.dispose();
-const flagValue = flag.dataSync()[0];
-flag.dispose();
-if (flagValue < config.minConfidence) {
+const confidenceVal = confidence.dataSync()[0];
+confidence.dispose();
+if (confidenceVal < config.minConfidence) {
 keypoints.dispose();
 this.regionsOfInterest[i] = [];
 return hands;
 }
 const keypointsReshaped = tf2.reshape(keypoints, [-1, 3]);
-const rawCoords = await keypointsReshaped.array();
+const rawCoords = keypointsReshaped.arraySync();
 keypoints.dispose();
 keypointsReshaped.dispose();
 const coords = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
@@ -4904,7 +4902,7 @@ var require_pipeline2 = __commonJS((exports2) => {
 this.updateRegionsOfInterest(nextBoundingBox, false, i);
 const result = {
 landmarks: coords,
-confidence: flagValue,
+confidence: confidenceVal,
 box: {
 topLeft: nextBoundingBox.startPoint,
 bottomRight: nextBoundingBox.endPoint
@@ -5729,6 +5727,7 @@ var require_config = __commonJS((exports2) => {
 var config_default = {
 backend: "webgl",
 console: true,
+async: false,
 profile: false,
 deallocate: false,
 scoped: false,
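The new `async` option toggles between sequential and parallel model execution in detect(). A hypothetical configuration sketch (only the keys shown in the diffs are real; everything else stays at its defaults):

const config = {
  backend: 'webgl',
  async: true,    // schedule enabled models together, join with Promise.all
  profile: false, // per the config comments, profiling cannot be combined with async
};
// human.detect(input, config) then loses per-model performance entries but
// finishes in roughly the time of the slowest model rather than the sum.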
@@ -5950,10 +5949,12 @@ class Human {
 this.version = app.version;
 this.defaults = defaults;
 this.config = defaults;
-this.fx = tf.ENV.flags.IS_BROWSER && typeof document !== "undefined" ? new fxImage.Canvas() : null;
+this.fx = null;
 this.state = "idle";
 this.numTensors = 0;
 this.analyzeMemoryLeaks = false;
+this.inCanvas = null;
+this.outCanvas = null;
 this.models = {
 facemesh: null,
 posenet: null,
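The constructor now defers `fx` and keeps `inCanvas`/`outCanvas` as instance state so that tfImage() can reuse them across frames instead of allocating per call. The same lazy-cached-member pattern in isolation (a sketch, browser APIs assumed):

class Processor {
  constructor() {
    this.canvas = null; // created on first use, then reused frame to frame
  }
  getCanvas(width, height) {
    if (!this.canvas || this.canvas.width !== width || this.canvas.height !== height) {
      this.canvas = typeof OffscreenCanvas !== 'undefined'
        ? new OffscreenCanvas(width, height)
        : document.createElement('canvas');
    }
    return this.canvas; // reallocated only when the input size changes
  }
}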
@@ -6030,12 +6031,14 @@ class Human {
 }
 }
 tfImage(input) {
-let filtered;
-const originalWidth = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
-const originalHeight = input.naturalHeight || input.videoHeight || input.height || input.shape && input.shape[2] > 0;
-let targetWidth = originalWidth;
-let targetHeight = originalHeight;
-if (this.fx && this.config.filter.enabled && !(input instanceof tf.Tensor)) {
+let tensor;
+if (input instanceof tf.Tensor) {
+tensor = tf.clone(input);
+} else {
+const originalWidth = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
+const originalHeight = input.naturalHeight || input.videoHeight || input.height || input.shape && input.shape[2] > 0;
+let targetWidth = originalWidth;
+let targetHeight = originalHeight;
 if (this.config.filter.width > 0)
 targetWidth = this.config.filter.width;
 else if (this.config.filter.height > 0)
@@ -6044,60 +6047,69 @@ class Human {
 targetHeight = this.config.filter.height;
 else if (this.config.filter.width > 0)
 targetHeight = originalHeight * (this.config.filter.width / originalWidth);
-const offscreenCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
-if (offscreenCanvas.width !== targetWidth)
-offscreenCanvas.width = targetWidth;
-if (offscreenCanvas.height !== targetHeight)
-offscreenCanvas.height = targetHeight;
-const ctx = offscreenCanvas.getContext("2d");
+if (!this.inCanvas || this.inCanvas.width !== originalWidth || this.inCanvas.height !== originalHeight) {
+this.inCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
+if (this.inCanvas.width !== targetWidth)
+this.inCanvas.width = targetWidth;
+if (this.inCanvas.height !== targetHeight)
+this.inCanvas.height = targetHeight;
+}
+const ctx = this.inCanvas.getContext("2d");
 if (input instanceof ImageData)
 ctx.putImageData(input, 0, 0);
 else
-ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, offscreenCanvas.width, offscreenCanvas.height);
-this.fx.reset();
-this.fx.addFilter("brightness", this.config.filter.brightness);
-if (this.config.filter.contrast !== 0)
-this.fx.addFilter("contrast", this.config.filter.contrast);
-if (this.config.filter.sharpness !== 0)
-this.fx.addFilter("sharpen", this.config.filter.sharpness);
-if (this.config.filter.blur !== 0)
-this.fx.addFilter("blur", this.config.filter.blur);
-if (this.config.filter.saturation !== 0)
-this.fx.addFilter("saturation", this.config.filter.saturation);
-if (this.config.filter.hue !== 0)
-this.fx.addFilter("hue", this.config.filter.hue);
-if (this.config.filter.negative)
-this.fx.addFilter("negative");
-if (this.config.filter.sepia)
-this.fx.addFilter("sepia");
-if (this.config.filter.vintage)
-this.fx.addFilter("brownie");
-if (this.config.filter.sepia)
-this.fx.addFilter("sepia");
-if (this.config.filter.kodachrome)
-this.fx.addFilter("kodachrome");
-if (this.config.filter.technicolor)
-this.fx.addFilter("technicolor");
-if (this.config.filter.polaroid)
-this.fx.addFilter("polaroid");
-if (this.config.filter.pixelate !== 0)
-this.fx.addFilter("pixelate", this.config.filter.pixelate);
-filtered = this.fx.apply(offscreenCanvas);
-}
-let tensor;
-if (input instanceof tf.Tensor) {
-tensor = tf.clone(input);
-} else {
-const canvas = filtered || input;
+ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, this.inCanvas.width, this.inCanvas.height);
+if (this.config.filter.enabled) {
+if (!this.outCanvas || this.inCanvas.width !== this.outCanvas.width || this.inCanvas.height !== this.outCanvas.height) {
+this.outCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(this.inCanvas.width, this.inCanvas.height) : document.createElement("canvas");
+if (this.outCanvas.width !== this.inCanvas.width)
+this.outCanvas.width = this.inCanvas.width;
+if (this.outCanvas.height !== this.inCanvas.height)
+this.outCanvas.height = this.inCanvas.height;
+}
+if (!this.fx)
+this.fx = tf.ENV.flags.IS_BROWSER && typeof document !== "undefined" ? new fxImage.Canvas({canvas: this.outCanvas}) : null;
+this.fx.reset();
+this.fx.addFilter("brightness", this.config.filter.brightness);
+if (this.config.filter.contrast !== 0)
+this.fx.addFilter("contrast", this.config.filter.contrast);
+if (this.config.filter.sharpness !== 0)
+this.fx.addFilter("sharpen", this.config.filter.sharpness);
+if (this.config.filter.blur !== 0)
+this.fx.addFilter("blur", this.config.filter.blur);
+if (this.config.filter.saturation !== 0)
+this.fx.addFilter("saturation", this.config.filter.saturation);
+if (this.config.filter.hue !== 0)
+this.fx.addFilter("hue", this.config.filter.hue);
+if (this.config.filter.negative)
+this.fx.addFilter("negative");
+if (this.config.filter.sepia)
+this.fx.addFilter("sepia");
+if (this.config.filter.vintage)
+this.fx.addFilter("brownie");
+if (this.config.filter.sepia)
+this.fx.addFilter("sepia");
+if (this.config.filter.kodachrome)
+this.fx.addFilter("kodachrome");
+if (this.config.filter.technicolor)
+this.fx.addFilter("technicolor");
+if (this.config.filter.polaroid)
+this.fx.addFilter("polaroid");
+if (this.config.filter.pixelate !== 0)
+this.fx.addFilter("pixelate", this.config.filter.pixelate);
+this.fx.apply(this.inCanvas);
+}
+if (!this.outCanvas)
+this.outCanvas = this.inCanvas;
 let pixels;
-if (this.config.backend === "webgl" || canvas instanceof ImageData) {
-pixels = tf.browser.fromPixels(canvas);
+if (this.config.backend === "webgl" || this.outCanvas instanceof ImageData) {
+pixels = tf.browser.fromPixels(this.outCanvas);
 } else {
 const tempCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
 tempCanvas.width = targetWidth;
 tempCanvas.height = targetHeight;
 const tempCtx = tempCanvas.getContext("2d");
-tempCtx.drawImage(canvas, 0, 0);
+tempCtx.drawImage(this.outCanvas, 0, 0);
 const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
 pixels = tf.browser.fromPixels(data);
 }
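Stripped of the filter plumbing, the reworked tfImage() reduces to: clone and return tensor inputs immediately, otherwise draw the frame once into the cached canvas and read it back with tf.browser.fromPixels. A reduced sketch of that flow (function name hypothetical, browser context with tfjs assumed):

function toTensor(input, cachedCanvas) {
  if (input instanceof tf.Tensor) return tf.clone(input); // no canvas work at all
  const ctx = cachedCanvas.getContext('2d');
  if (input instanceof ImageData) ctx.putImageData(input, 0, 0);
  else ctx.drawImage(input, 0, 0, cachedCanvas.width, cachedCanvas.height);
  return tf.browser.fromPixels(cachedCanvas); // int32 tensor, shape [height, width, 3]
}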
@@ -6106,7 +6118,7 @@ class Human {
 pixels.dispose();
 casted.dispose();
 }
-return {tensor, canvas: this.config.filter.return ? filtered : null};
+return {tensor, canvas: this.config.filter.return ? this.outCanvas : null};
 }
 async detect(input, userConfig = {}) {
 this.state = "config";
@@ -6122,6 +6134,10 @@ class Human {
 return {error};
 }
 return new Promise(async (resolve) => {
+let poseRes;
+let handRes;
+let ssrRes;
+let emotionRes;
 const timeStart = now();
 timeStamp = now();
 await this.checkBackend();
@@ -6143,18 +6159,28 @@ class Human {
 const image = this.tfImage(input);
 perf.image = Math.trunc(now() - timeStamp);
 const imageTensor = image.tensor;
-this.state = "run:body";
-timeStamp = now();
-this.analyze("Start PoseNet");
-const poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
-this.analyze("End PoseNet:");
-perf.body = Math.trunc(now() - timeStamp);
-this.state = "run:hand";
-timeStamp = now();
-this.analyze("Start HandPose:");
-const handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
-this.analyze("End HandPose:");
-perf.hand = Math.trunc(now() - timeStamp);
+if (this.config.async) {
+poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
+} else {
+this.state = "run:body";
+timeStamp = now();
+this.analyze("Start PoseNet");
+poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
+this.analyze("End PoseNet:");
+perf.body = Math.trunc(now() - timeStamp);
+}
+if (this.config.async) {
+handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
+} else {
+this.state = "run:hand";
+timeStamp = now();
+this.analyze("Start HandPose:");
+handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
+this.analyze("End HandPose:");
+perf.hand = Math.trunc(now() - timeStamp);
+}
+if (this.config.async)
+[poseRes, handRes] = await Promise.all([poseRes, handRes]);
 const faceRes = [];
 if (this.config.face.enabled) {
 this.state = "run:face";
@@ -6169,11 +6195,11 @@ class Human {
 }
 this.state = "run:agegender";
 timeStamp = now();
-const ssrData = this.config.face.age.enabled || this.config.face.gender.enabled ? await ssrnet.predict(face.image, this.config) : {};
+ssrRes = this.config.face.age.enabled || this.config.face.gender.enabled ? await ssrnet.predict(face.image, this.config) : {};
 perf.agegender = Math.trunc(now() - timeStamp);
 this.state = "run:emotion";
 timeStamp = now();
-const emotionData = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
+emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
 perf.emotion = Math.trunc(now() - timeStamp);
 face.image.dispose();
 const iris = face.annotations.leftEyeIris && face.annotations.rightEyeIris ? Math.max(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0], face.annotations.rightEyeIris[3][0] - face.annotations.rightEyeIris[1][0]) : 0;
@@ -6182,10 +6208,10 @@ class Human {
 box: face.box,
 mesh: face.mesh,
 annotations: face.annotations,
-age: ssrData.age,
-gender: ssrData.gender,
-agConfidence: ssrData.confidence,
-emotion: emotionData,
+age: ssrRes.age,
+gender: ssrRes.gender,
+agConfidence: ssrRes.confidence,
+emotion: emotionRes,
 iris: iris !== 0 ? Math.trunc(100 * 11.7 / iris) / 100 : 0
 });
 this.analyze("End FaceMesh:");
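The parallel branch added to detect() starts each model without awaiting and joins the promises afterwards, so total latency is bounded by the slowest model instead of the sum. The pattern in isolation (stand-in function names, a sketch only):

async function detectParallel(estimatePoses, estimateHands, tensor) {
  const poseRes = estimatePoses(tensor); // promise created, not awaited
  const handRes = estimateHands(tensor); // starts before poseRes settles
  const [poses, hands] = await Promise.all([poseRes, handRes]);
  return { poses, hands };
}
// This is also why per-model timers disappear on this path: with both models
// in flight at once, wall-clock deltas no longer attribute time to either one.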
File diff suppressed because one or more lines are too long
@@ -68200,14 +68200,14 @@ var require_blazeface = __commonJS((exports2) => {
 return [prediction, decodedBounds, scoresOut];
 });
 const boxIndicesTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.maxFaces, this.iouThreshold, this.scoreThreshold);
-const boxIndices = await boxIndicesTensor.array();
+const boxIndices = boxIndicesTensor.arraySync();
 boxIndicesTensor.dispose();
 const boundingBoxesMap = boxIndices.map((boxIndex) => tf2.slice(boxes, [boxIndex, 0], [1, -1]));
-const boundingBoxes = await Promise.all(boundingBoxesMap.map(async (boundingBox) => {
-const vals = await boundingBox.array();
+const boundingBoxes = boundingBoxesMap.map((boundingBox) => {
+const vals = boundingBox.arraySync();
 boundingBox.dispose();
 return vals;
-}));
+});
 const annotatedBoxes = [];
 for (let i = 0; i < boundingBoxes.length; i++) {
 const boundingBox = boundingBoxes[i];
@@ -68234,9 +68234,12 @@ var require_blazeface = __commonJS((exports2) => {
 }
 async estimateFaces(input) {
 const {boxes, scaleFactor} = await this.getBoundingBoxes(input);
-return Promise.all(boxes.map(async (face) => {
+const faces = [];
+for (const face of boxes) {
+const landmarkData = face.landmarks.arraySync();
 const scaledBox = scaleBoxFromPrediction(face, scaleFactor);
-const [landmarkData, boxData, probabilityData] = await Promise.all([face.landmarks, scaledBox, face.probability].map(async (d) => d.array()));
+const boxData = scaleBox.arraySync();
+const probabilityData = face.probability.arraySync();
 const anchor = face.anchor;
 const [scaleFactorX, scaleFactorY] = scaleFactor;
 const scaledLandmarks = landmarkData.map((landmark) => [
@@ -68253,8 +68256,9 @@ var require_blazeface = __commonJS((exports2) => {
 face.landmarks.dispose();
 face.probability.dispose();
 scaledBox.dispose();
-return normalizedFace;
-}));
+faces.push(normalizedFace);
+}
+return faces;
 }
 }
 async function load(config) {
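Same refactor in estimateFaces: the Promise.all over per-face async closures becomes one plain loop, since every read inside is now synchronous. One detail worth flagging: the new line reads `scaleBox.arraySync()` while the variable declared just above it is `scaledBox`, which looks like a typo carried in the commit rather than an intentional rename. The scheduling difference in miniature:

// Equivalent outputs once f is synchronous; the loop just avoids creating a
// promise and a microtask per face.
const viaPromises = (faces, f) => Promise.all(faces.map(async (face) => f(face)));
const viaLoop = (faces, f) => { const out = []; for (const face of faces) out.push(f(face)); return out; };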
@@ -72007,7 +72011,7 @@ var require_ssrnet = __commonJS((exports2) => {
 let ageT;
 let genderT;
 const obj = {};
-if (!config.profile) {
+if (!config.profile || config.async) {
 if (config.face.age.enabled)
 promises.push(ageT = models.age.predict(enhance));
 if (config.face.gender.enabled)
@@ -72024,12 +72028,12 @@ var require_ssrnet = __commonJS((exports2) => {
 profile2.run("gender", profileGender);
 }
 if (ageT) {
-const data = await ageT.data();
+const data = ageT.dataSync();
 obj.age = Math.trunc(10 * data[0]) / 10;
 tf2.dispose(ageT);
 }
 if (genderT) {
-const data = await genderT.data();
+const data = genderT.dataSync();
 const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
 if (confidence > config.face.gender.minConfidence) {
 obj.gender = data[0] <= 0.5 ? "female" : "male";
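The widened guard `!config.profile || config.async` means profiling is silently skipped whenever models run in parallel, matching the config note that the two options are mutually exclusive. For reference, the profiled branch uses tf.profile(), roughly like this (a sketch, assuming @tensorflow/tfjs):

const tf = require('@tensorflow/tfjs');

async function profiledPredict(model, input) {
  const info = await tf.profile(() => model.predict(input));
  console.log(info.peakBytes, info.newTensors); // memory and tensor stats for the call
  return info.result; // the prediction itself, as used via profileData.result above
}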
@@ -72084,11 +72088,11 @@ var require_emotion = __commonJS((exports2) => {
 let data;
 if (!config.profile) {
 const emotionT = await models.emotion.predict(grayscale);
-data = await emotionT.data();
+data = emotionT.dataSync();
 tf2.dispose(emotionT);
 } else {
 const profileData = await tf2.profile(() => models.emotion.predict(grayscale));
-data = await profileData.result.data();
+data = profileData.result.dataSync();
 profileData.result.dispose();
 profile2.run("emotion", profileData);
 }
@@ -72671,10 +72675,7 @@ var require_box2 = __commonJS((exports2) => {
 function scaleBoxCoordinates(box, factor) {
 const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];
 const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];
-const palmLandmarks = box.palmLandmarks.map((coord) => {
-const scaledCoord = [coord[0] * factor[0], coord[1] * factor[1]];
-return scaledCoord;
-});
+const palmLandmarks = box.palmLandmarks.map((coord) => [coord[0] * factor[0], coord[1] * factor[1]]);
 return {startPoint, endPoint, palmLandmarks};
 }
 exports2.scaleBoxCoordinates = scaleBoxCoordinates;
@@ -72748,8 +72749,7 @@ var require_handdetector = __commonJS((exports2) => {
 const rawBoxes = tf2.slice(prediction, [0, 1], [-1, 4]);
 const boxes = this.normalizeBoxes(rawBoxes);
 const boxesWithHandsTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.maxHands, this.iouThreshold, this.scoreThreshold);
-const boxesWithHands = await boxesWithHandsTensor.array();
-const toDispose = [batchedPrediction, boxesWithHandsTensor, prediction, boxes, rawBoxes, scores];
+const boxesWithHands = boxesWithHandsTensor.arraySync();
 const detectedHands = tf2.tidy(() => {
 const detectedBoxes = [];
 for (const i in boxesWithHands) {
@@ -72761,7 +72761,7 @@ var require_handdetector = __commonJS((exports2) => {
 }
 return detectedBoxes;
 });
-toDispose.forEach((tensor) => tensor.dispose());
+[batchedPrediction, boxesWithHandsTensor, prediction, boxes, rawBoxes, scores].forEach((tensor) => tensor.dispose());
 return detectedHands;
 }
 async estimateHandBounds(input, config) {
@@ -72769,12 +72769,10 @@ var require_handdetector = __commonJS((exports2) => {
 this.scoreThreshold = config.scoreThreshold;
 this.maxHands = config.maxHands;
 const resized = input.resizeBilinear([this.width, this.height]);
-const divided = resized.div(255);
-const normalized = divided.sub(0.5);
-const image = normalized.mul(2);
+const divided = resized.mul([1 / 127.5]);
+const image = divided.sub(0.5);
 resized.dispose();
 divided.dispose();
-normalized.dispose();
 const predictions = await this.getBoundingBoxes(image);
 image.dispose();
 if (!predictions || predictions.length === 0)
@@ -72782,10 +72780,10 @@ var require_handdetector = __commonJS((exports2) => {
 const hands = [];
 for (const i in predictions) {
 const prediction = predictions[i];
-const boundingBoxes = await prediction.boxes.array();
-const startPoint = boundingBoxes[0].slice(0, 2);
-const endPoint = boundingBoxes[0].slice(2, 4);
-const palmLandmarks = await prediction.palmLandmarks.array();
+const boundingBoxes = prediction.boxes.dataSync();
+const startPoint = [boundingBoxes[0], boundingBoxes[1]];
+const endPoint = [boundingBoxes[2], boundingBoxes[3]];
+const palmLandmarks = prediction.palmLandmarks.arraySync();
 prediction.boxes.dispose();
 prediction.palmLandmarks.dispose();
 hands.push(bounding.scaleBoxCoordinates({startPoint, endPoint, palmLandmarks}, [input.shape[2] / this.width, input.shape[1] / this.height]));
@@ -72893,11 +72891,11 @@ var require_pipeline2 = __commonJS((exports2) => {
 const PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;
 const PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;
 class HandPipeline {
-constructor(boundingBoxDetector, meshDetector, config) {
+constructor(boundingBoxDetector, detector, config) {
 this.regionsOfInterest = [];
 this.runsWithoutHandDetector = 0;
 this.boundingBoxDetector = boundingBoxDetector;
-this.meshDetector = meshDetector;
+this.detector = detector;
 this.meshWidth = config.inputSize;
 this.meshHeight = config.inputSize;
 this.enlargeFactor = config.enlargeFactor;
@@ -72963,7 +72961,7 @@ var require_pipeline2 = __commonJS((exports2) => {
 if (!this.regionsOfInterest)
 return hands;
 for (const i in this.regionsOfInterest) {
-const currentBox = this.regionsOfInterest[i][0];
+const currentBox = this.regionsOfInterest[i] ? this.regionsOfInterest[i][0] : null;
 if (!currentBox)
 return hands;
 const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
@@ -72976,18 +72974,18 @@ var require_pipeline2 = __commonJS((exports2) => {
 const handImage = croppedInput.div(255);
 croppedInput.dispose();
 rotatedImage.dispose();
-const prediction = this.meshDetector.predict(handImage);
-const [flag, keypoints] = prediction;
+const prediction = this.detector.predict(handImage);
+const [confidence, keypoints] = prediction;
 handImage.dispose();
-const flagValue = flag.dataSync()[0];
-flag.dispose();
-if (flagValue < config.minConfidence) {
+const confidenceVal = confidence.dataSync()[0];
+confidence.dispose();
+if (confidenceVal < config.minConfidence) {
 keypoints.dispose();
 this.regionsOfInterest[i] = [];
 return hands;
 }
 const keypointsReshaped = tf2.reshape(keypoints, [-1, 3]);
-const rawCoords = await keypointsReshaped.array();
+const rawCoords = keypointsReshaped.arraySync();
 keypoints.dispose();
 keypointsReshaped.dispose();
 const coords = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
@@ -72995,7 +72993,7 @@ var require_pipeline2 = __commonJS((exports2) => {
 this.updateRegionsOfInterest(nextBoundingBox, false, i);
 const result = {
 landmarks: coords,
-confidence: flagValue,
+confidence: confidenceVal,
 box: {
 topLeft: nextBoundingBox.startPoint,
 bottomRight: nextBoundingBox.endPoint
@@ -73820,6 +73818,7 @@ var require_config = __commonJS((exports2) => {
 var config_default = {
 backend: "webgl",
 console: true,
+async: false,
 profile: false,
 deallocate: false,
 scoped: false,
@@ -74041,10 +74040,12 @@ class Human {
 this.version = app.version;
 this.defaults = defaults;
 this.config = defaults;
-this.fx = tf.ENV.flags.IS_BROWSER && typeof document !== "undefined" ? new fxImage.Canvas() : null;
+this.fx = null;
 this.state = "idle";
 this.numTensors = 0;
 this.analyzeMemoryLeaks = false;
+this.inCanvas = null;
+this.outCanvas = null;
 this.models = {
 facemesh: null,
 posenet: null,
@@ -74121,12 +74122,14 @@ class Human {
 }
 }
 tfImage(input) {
-let filtered;
-const originalWidth = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
-const originalHeight = input.naturalHeight || input.videoHeight || input.height || input.shape && input.shape[2] > 0;
-let targetWidth = originalWidth;
-let targetHeight = originalHeight;
-if (this.fx && this.config.filter.enabled && !(input instanceof tf.Tensor)) {
+let tensor;
+if (input instanceof tf.Tensor) {
+tensor = tf.clone(input);
+} else {
+const originalWidth = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
+const originalHeight = input.naturalHeight || input.videoHeight || input.height || input.shape && input.shape[2] > 0;
+let targetWidth = originalWidth;
+let targetHeight = originalHeight;
 if (this.config.filter.width > 0)
 targetWidth = this.config.filter.width;
 else if (this.config.filter.height > 0)
@@ -74135,60 +74138,69 @@ class Human {
 targetHeight = this.config.filter.height;
 else if (this.config.filter.width > 0)
 targetHeight = originalHeight * (this.config.filter.width / originalWidth);
-const offscreenCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
-if (offscreenCanvas.width !== targetWidth)
-offscreenCanvas.width = targetWidth;
-if (offscreenCanvas.height !== targetHeight)
-offscreenCanvas.height = targetHeight;
-const ctx = offscreenCanvas.getContext("2d");
+if (!this.inCanvas || this.inCanvas.width !== originalWidth || this.inCanvas.height !== originalHeight) {
+this.inCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
+if (this.inCanvas.width !== targetWidth)
+this.inCanvas.width = targetWidth;
+if (this.inCanvas.height !== targetHeight)
+this.inCanvas.height = targetHeight;
+}
+const ctx = this.inCanvas.getContext("2d");
 if (input instanceof ImageData)
 ctx.putImageData(input, 0, 0);
 else
-ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, offscreenCanvas.width, offscreenCanvas.height);
-this.fx.reset();
-this.fx.addFilter("brightness", this.config.filter.brightness);
-if (this.config.filter.contrast !== 0)
-this.fx.addFilter("contrast", this.config.filter.contrast);
-if (this.config.filter.sharpness !== 0)
-this.fx.addFilter("sharpen", this.config.filter.sharpness);
-if (this.config.filter.blur !== 0)
-this.fx.addFilter("blur", this.config.filter.blur);
-if (this.config.filter.saturation !== 0)
-this.fx.addFilter("saturation", this.config.filter.saturation);
-if (this.config.filter.hue !== 0)
-this.fx.addFilter("hue", this.config.filter.hue);
-if (this.config.filter.negative)
-this.fx.addFilter("negative");
-if (this.config.filter.sepia)
-this.fx.addFilter("sepia");
-if (this.config.filter.vintage)
-this.fx.addFilter("brownie");
-if (this.config.filter.sepia)
-this.fx.addFilter("sepia");
-if (this.config.filter.kodachrome)
-this.fx.addFilter("kodachrome");
-if (this.config.filter.technicolor)
-this.fx.addFilter("technicolor");
-if (this.config.filter.polaroid)
-this.fx.addFilter("polaroid");
-if (this.config.filter.pixelate !== 0)
-this.fx.addFilter("pixelate", this.config.filter.pixelate);
-filtered = this.fx.apply(offscreenCanvas);
-}
-let tensor;
-if (input instanceof tf.Tensor) {
-tensor = tf.clone(input);
-} else {
-const canvas = filtered || input;
+ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, this.inCanvas.width, this.inCanvas.height);
+if (this.config.filter.enabled) {
+if (!this.outCanvas || this.inCanvas.width !== this.outCanvas.width || this.inCanvas.height !== this.outCanvas.height) {
+this.outCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(this.inCanvas.width, this.inCanvas.height) : document.createElement("canvas");
+if (this.outCanvas.width !== this.inCanvas.width)
+this.outCanvas.width = this.inCanvas.width;
+if (this.outCanvas.height !== this.inCanvas.height)
+this.outCanvas.height = this.inCanvas.height;
+}
+if (!this.fx)
+this.fx = tf.ENV.flags.IS_BROWSER && typeof document !== "undefined" ? new fxImage.Canvas({canvas: this.outCanvas}) : null;
+this.fx.reset();
+this.fx.addFilter("brightness", this.config.filter.brightness);
+if (this.config.filter.contrast !== 0)
+this.fx.addFilter("contrast", this.config.filter.contrast);
+if (this.config.filter.sharpness !== 0)
+this.fx.addFilter("sharpen", this.config.filter.sharpness);
+if (this.config.filter.blur !== 0)
+this.fx.addFilter("blur", this.config.filter.blur);
+if (this.config.filter.saturation !== 0)
+this.fx.addFilter("saturation", this.config.filter.saturation);
+if (this.config.filter.hue !== 0)
+this.fx.addFilter("hue", this.config.filter.hue);
+if (this.config.filter.negative)
+this.fx.addFilter("negative");
+if (this.config.filter.sepia)
+this.fx.addFilter("sepia");
+if (this.config.filter.vintage)
+this.fx.addFilter("brownie");
+if (this.config.filter.sepia)
+this.fx.addFilter("sepia");
+if (this.config.filter.kodachrome)
+this.fx.addFilter("kodachrome");
+if (this.config.filter.technicolor)
+this.fx.addFilter("technicolor");
+if (this.config.filter.polaroid)
+this.fx.addFilter("polaroid");
+if (this.config.filter.pixelate !== 0)
+this.fx.addFilter("pixelate", this.config.filter.pixelate);
+this.fx.apply(this.inCanvas);
+}
+if (!this.outCanvas)
+this.outCanvas = this.inCanvas;
 let pixels;
-if (this.config.backend === "webgl" || canvas instanceof ImageData) {
-pixels = tf.browser.fromPixels(canvas);
+if (this.config.backend === "webgl" || this.outCanvas instanceof ImageData) {
+pixels = tf.browser.fromPixels(this.outCanvas);
 } else {
 const tempCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
 tempCanvas.width = targetWidth;
 tempCanvas.height = targetHeight;
 const tempCtx = tempCanvas.getContext("2d");
-tempCtx.drawImage(canvas, 0, 0);
+tempCtx.drawImage(this.outCanvas, 0, 0);
 const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
 pixels = tf.browser.fromPixels(data);
 }
@@ -74197,7 +74209,7 @@ class Human {
 pixels.dispose();
 casted.dispose();
 }
-return {tensor, canvas: this.config.filter.return ? filtered : null};
+return {tensor, canvas: this.config.filter.return ? this.outCanvas : null};
 }
 async detect(input, userConfig = {}) {
 this.state = "config";
@@ -74213,6 +74225,10 @@ class Human {
 return {error};
 }
 return new Promise(async (resolve) => {
+let poseRes;
+let handRes;
+let ssrRes;
+let emotionRes;
 const timeStart = now();
 timeStamp = now();
 await this.checkBackend();
@@ -74234,18 +74250,28 @@ class Human {
 const image = this.tfImage(input);
 perf.image = Math.trunc(now() - timeStamp);
 const imageTensor = image.tensor;
-this.state = "run:body";
-timeStamp = now();
-this.analyze("Start PoseNet");
-const poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
-this.analyze("End PoseNet:");
-perf.body = Math.trunc(now() - timeStamp);
-this.state = "run:hand";
-timeStamp = now();
-this.analyze("Start HandPose:");
-const handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
-this.analyze("End HandPose:");
-perf.hand = Math.trunc(now() - timeStamp);
+if (this.config.async) {
+poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
+} else {
+this.state = "run:body";
+timeStamp = now();
+this.analyze("Start PoseNet");
+poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
+this.analyze("End PoseNet:");
+perf.body = Math.trunc(now() - timeStamp);
+}
+if (this.config.async) {
+handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
+} else {
+this.state = "run:hand";
+timeStamp = now();
+this.analyze("Start HandPose:");
+handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
+this.analyze("End HandPose:");
+perf.hand = Math.trunc(now() - timeStamp);
+}
+if (this.config.async)
+[poseRes, handRes] = await Promise.all([poseRes, handRes]);
 const faceRes = [];
 if (this.config.face.enabled) {
 this.state = "run:face";
@@ -74260,11 +74286,11 @@ class Human {
 }
 this.state = "run:agegender";
 timeStamp = now();
-const ssrData = this.config.face.age.enabled || this.config.face.gender.enabled ? await ssrnet.predict(face.image, this.config) : {};
+ssrRes = this.config.face.age.enabled || this.config.face.gender.enabled ? await ssrnet.predict(face.image, this.config) : {};
 perf.agegender = Math.trunc(now() - timeStamp);
 this.state = "run:emotion";
 timeStamp = now();
-const emotionData = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
+emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
 perf.emotion = Math.trunc(now() - timeStamp);
 face.image.dispose();
 const iris = face.annotations.leftEyeIris && face.annotations.rightEyeIris ? Math.max(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0], face.annotations.rightEyeIris[3][0] - face.annotations.rightEyeIris[1][0]) : 0;
@@ -74273,10 +74299,10 @@ class Human {
 box: face.box,
 mesh: face.mesh,
 annotations: face.annotations,
-age: ssrData.age,
-gender: ssrData.gender,
-agConfidence: ssrData.confidence,
-emotion: emotionData,
+age: ssrRes.age,
+gender: ssrRes.gender,
+agConfidence: ssrRes.confidence,
+emotion: emotionRes,
 iris: iris !== 0 ? Math.trunc(100 * 11.7 / iris) / 100 : 0
 });
 this.analyze("End FaceMesh:");
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 {
 "inputs": {
 "config.js": {
-"bytes": 7075,
+"bytes": 7322,
 "imports": []
 },
 "package.json": {
@@ -9,7 +9,7 @@
 "imports": []
 },
 "src/emotion/emotion.js": {
-"bytes": 1958,
+"bytes": 1954,
 "imports": [
 {
 "path": "src/profile.js"
@@ -17,7 +17,7 @@
 ]
 },
 "src/facemesh/blazeface.js": {
-"bytes": 7161,
+"bytes": 6991,
 "imports": []
 },
 "src/facemesh/box.js": {
@@ -75,11 +75,11 @@
 "imports": []
 },
 "src/handpose/box.js": {
-"bytes": 2627,
+"bytes": 2572,
 "imports": []
 },
 "src/handpose/handdetector.js": {
-"bytes": 4248,
+"bytes": 4077,
 "imports": [
 {
 "path": "src/handpose/box.js"
@@ -105,7 +105,7 @@
 "imports": []
 },
 "src/handpose/pipeline.js": {
-"bytes": 8169,
+"bytes": 8216,
 "imports": [
 {
 "path": "src/handpose/box.js"
@@ -120,7 +120,7 @@
 "imports": []
 },
 "src/human.js": {
-"bytes": 13768,
+"bytes": 15047,
 "imports": [
 {
 "path": "src/facemesh/facemesh.js"
@@ -263,7 +263,7 @@
 "imports": []
 },
 "src/ssrnet/ssrnet.js": {
-"bytes": 2115,
+"bytes": 2127,
 "imports": [
 {
 "path": "src/profile.js"
@@ -275,13 +275,13 @@
 "dist/human.node-nobundle.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 268701
+"bytes": 271906
 },
 "dist/human.node-nobundle.js": {
 "imports": [],
 "inputs": {
 "src/facemesh/blazeface.js": {
-"bytesInOutput": 7125
+"bytesInOutput": 7137
 },
 "src/facemesh/keypoints.js": {
 "bytesInOutput": 2771
@@ -308,10 +308,10 @@
 "bytesInOutput": 1095
 },
 "src/ssrnet/ssrnet.js": {
-"bytesInOutput": 2314
+"bytesInOutput": 2326
 },
 "src/emotion/emotion.js": {
-"bytesInOutput": 2047
+"bytesInOutput": 2043
 },
 "src/posenet/modelBase.js": {
 "bytesInOutput": 912
@@ -347,10 +347,10 @@
 "bytesInOutput": 917
 },
 "src/handpose/box.js": {
-"bytesInOutput": 2813
+"bytesInOutput": 2752
 },
 "src/handpose/handdetector.js": {
-"bytesInOutput": 4119
+"bytesInOutput": 4029
 },
 "src/handpose/keypoints.js": {
 "bytesInOutput": 265
@@ -359,7 +359,7 @@
 "bytesInOutput": 2671
 },
 "src/handpose/pipeline.js": {
-"bytesInOutput": 7606
+"bytesInOutput": 7653
 },
 "src/handpose/handpose.js": {
 "bytesInOutput": 2273
@@ -368,7 +368,7 @@
 "bytesInOutput": 20139
 },
 "config.js": {
-"bytesInOutput": 2275
+"bytesInOutput": 2293
 },
 "package.json": {
 "bytesInOutput": 3015
@@ -377,10 +377,10 @@
 "bytesInOutput": 47
 },
 "src/human.js": {
-"bytesInOutput": 11904
+"bytesInOutput": 13241
 }
 },
-"bytes": 158304
+"bytes": 159575
 }
 }
 }
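The JSON above is an esbuild metafile: `bytes` is each input's source size and `bytesInOutput` its share of the emitted bundle, so the deltas quantify this commit (src/human.js grows from 11904 to 13241 bytes in output, the handpose files shrink). A small Node sketch to rank contributors (the metafile path is an assumption):

const meta = require('./dist/human.node-nobundle.json'); // hypothetical path

const bundle = meta['dist/human.node-nobundle.js'];
const sizes = Object.entries(bundle.inputs)
  .map(([file, { bytesInOutput }]) => ({ file, bytesInOutput }))
  .sort((a, b) => b.bytesInOutput - a.bytesInOutput);
console.table(sizes.slice(0, 5)); // largest bundle contributors first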
@@ -37,11 +37,11 @@ async function predict(image, config) {
 let data;
 if (!config.profile) {
 const emotionT = await models.emotion.predict(grayscale);
-data = await emotionT.data();
+data = emotionT.dataSync();
 tf.dispose(emotionT);
 } else {
 const profileData = await tf.profile(() => models.emotion.predict(grayscale));
-data = await profileData.result.data();
+data = profileData.result.dataSync();
 profileData.result.dispose();
 profile.run('emotion', profileData);
 }
@ -103,14 +103,15 @@ class BlazeFaceModel {
|
||||||
return [prediction, decodedBounds, scoresOut];
|
return [prediction, decodedBounds, scoresOut];
|
||||||
});
|
});
|
||||||
const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.maxFaces, this.iouThreshold, this.scoreThreshold);
|
const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.maxFaces, this.iouThreshold, this.scoreThreshold);
|
||||||
const boxIndices = await boxIndicesTensor.array();
|
const boxIndices = boxIndicesTensor.arraySync();
|
||||||
boxIndicesTensor.dispose();
|
boxIndicesTensor.dispose();
|
||||||
const boundingBoxesMap = boxIndices.map((boxIndex) => tf.slice(boxes, [boxIndex, 0], [1, -1]));
|
const boundingBoxesMap = boxIndices.map((boxIndex) => tf.slice(boxes, [boxIndex, 0], [1, -1]));
|
||||||
const boundingBoxes = await Promise.all(boundingBoxesMap.map(async (boundingBox) => {
|
const boundingBoxes = boundingBoxesMap.map((boundingBox) => {
|
||||||
const vals = await boundingBox.array();
|
const vals = boundingBox.arraySync();
|
||||||
boundingBox.dispose();
|
boundingBox.dispose();
|
||||||
return vals;
|
return vals;
|
||||||
}));
|
});
|
||||||
|
|
||||||
const annotatedBoxes = [];
|
const annotatedBoxes = [];
|
||||||
for (let i = 0; i < boundingBoxes.length; i++) {
|
for (let i = 0; i < boundingBoxes.length; i++) {
|
||||||
const boundingBox = boundingBoxes[i];
|
const boundingBox = boundingBoxes[i];
|
||||||
|
@ -120,12 +121,6 @@ class BlazeFaceModel {
|
||||||
const sliced = tf.slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1]);
|
const sliced = tf.slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1]);
|
||||||
const squeezed = sliced.squeeze();
|
const squeezed = sliced.squeeze();
|
||||||
const landmarks = squeezed.reshape([NUM_LANDMARKS, -1]);
|
const landmarks = squeezed.reshape([NUM_LANDMARKS, -1]);
|
||||||
/*
|
|
||||||
const landmarks = tf
|
|
||||||
.slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1])
|
|
||||||
.squeeze()
|
|
||||||
.reshape([NUM_LANDMARKS, -1]);
|
|
||||||
*/
|
|
||||||
const probability = tf.slice(scores, [boxIndex], [1]);
|
const probability = tf.slice(scores, [boxIndex], [1]);
|
||||||
const annotatedBox = { box, landmarks, probability, anchor };
|
const annotatedBox = { box, landmarks, probability, anchor };
|
||||||
annotatedBoxes.push(annotatedBox);
|
annotatedBoxes.push(annotatedBox);
|
||||||
@@ -145,9 +140,12 @@ class BlazeFaceModel {

   async estimateFaces(input) {
     const { boxes, scaleFactor } = await this.getBoundingBoxes(input);
-    return Promise.all(boxes.map(async (face) => {
+    const faces = [];
+    for (const face of boxes) {
+      const landmarkData = face.landmarks.arraySync();
       const scaledBox = scaleBoxFromPrediction(face, scaleFactor);
-      const [landmarkData, boxData, probabilityData] = await Promise.all([face.landmarks, scaledBox, face.probability].map(async (d) => d.array()));
+      const boxData = scaledBox.arraySync();
+      const probabilityData = face.probability.arraySync();
       const anchor = face.anchor;
       const [scaleFactorX, scaleFactorY] = scaleFactor;
       const scaledLandmarks = landmarkData
@@ -165,8 +163,9 @@ class BlazeFaceModel {
       face.landmarks.dispose();
       face.probability.dispose();
       scaledBox.dispose();
-      return normalizedFace;
-    }));
+      faces.push(normalizedFace);
+    }
+    return faces;
   }
 }

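The estimateFaces rewrite replaces a Promise.all over per-face async closures with a plain loop of synchronous reads that fills an ordinary array. A sketch of the control-flow change, with items and transform as illustrative stand-ins:

// before: one promise per element, resolved in bulk
// return Promise.all(items.map(async (item) => transform(await item.data())));
// after: a straight loop over blocking reads, returning a plain array
const out = [];
for (const item of items) out.push(transform(item.dataSync()));
return out;
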
@@ -30,10 +30,7 @@ exports.cutBoxFromImageAndResize = cutBoxFromImageAndResize;
 function scaleBoxCoordinates(box, factor) {
   const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];
   const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];
-  const palmLandmarks = box.palmLandmarks.map((coord) => {
-    const scaledCoord = [coord[0] * factor[0], coord[1] * factor[1]];
-    return scaledCoord;
-  });
+  const palmLandmarks = box.palmLandmarks.map((coord) => [coord[0] * factor[0], coord[1] * factor[1]]);
   return { startPoint, endPoint, palmLandmarks };
 }
 exports.scaleBoxCoordinates = scaleBoxCoordinates;

@@ -40,8 +40,7 @@ class HandDetector {
     const rawBoxes = tf.slice(prediction, [0, 1], [-1, 4]);
     const boxes = this.normalizeBoxes(rawBoxes);
     const boxesWithHandsTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.maxHands, this.iouThreshold, this.scoreThreshold);
-    const boxesWithHands = await boxesWithHandsTensor.array();
-    const toDispose = [batchedPrediction, boxesWithHandsTensor, prediction, boxes, rawBoxes, scores];
+    const boxesWithHands = boxesWithHandsTensor.arraySync();
     const detectedHands = tf.tidy(() => {
       const detectedBoxes = [];
       for (const i in boxesWithHands) {
@@ -53,7 +52,7 @@ class HandDetector {
       }
       return detectedBoxes;
     });
-    toDispose.forEach((tensor) => tensor.dispose());
+    [batchedPrediction, boxesWithHandsTensor, prediction, boxes, rawBoxes, scores].forEach((tensor) => tensor.dispose());
     return detectedHands;
   }

@@ -64,28 +63,24 @@ class HandDetector {
    * @param input The image to classify.
    */
   async estimateHandBounds(input, config) {
-    // const inputHeight = input.shape[2];
-    // const inputWidth = input.shape[1];
     this.iouThreshold = config.iouThreshold;
     this.scoreThreshold = config.scoreThreshold;
     this.maxHands = config.maxHands;
     const resized = input.resizeBilinear([this.width, this.height]);
-    const divided = resized.div(255);
-    const normalized = divided.sub(0.5);
-    const image = normalized.mul(2.0);
+    const divided = resized.mul([1 / 127.5]);
+    const image = divided.sub(0.5);
     resized.dispose();
     divided.dispose();
-    normalized.dispose();
     const predictions = await this.getBoundingBoxes(image);
     image.dispose();
     if (!predictions || (predictions.length === 0)) return null;
     const hands = [];
     for (const i in predictions) {
       const prediction = predictions[i];
-      const boundingBoxes = await prediction.boxes.array();
-      const startPoint = boundingBoxes[0].slice(0, 2);
-      const endPoint = boundingBoxes[0].slice(2, 4);
-      const palmLandmarks = await prediction.palmLandmarks.array();
+      const boundingBoxes = prediction.boxes.dataSync();
+      const startPoint = [boundingBoxes[0], boundingBoxes[1]];
+      const endPoint = [boundingBoxes[2], boundingBoxes[3]];
+      const palmLandmarks = prediction.palmLandmarks.arraySync();
       prediction.boxes.dispose();
       prediction.palmLandmarks.dispose();
       hands.push(bounding.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks }, [input.shape[2] / this.width, input.shape[1] / this.height]));

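Two details in this hunk are worth spelling out. First, the original three-op normalization div(255).sub(0.5).mul(2.0) maps pixels to [-1, 1]; since (x / 255 - 0.5) * 2 simplifies to x / 127.5 - 1, the fully fused two-op form pairs mul(1 / 127.5) with sub(1). Second, dataSync() returns a flat TypedArray, so box corners are picked by index rather than by slicing a nested array. A sketch, with boxTensor as an illustrative tensor of shape [1, 4]:

// fused [-1, 1] normalization (algebraic equivalent of /255, -0.5, *2)
const image = resized.mul(1 / 127.5).sub(1);

// flat readback: [x1, y1, x2, y2] indexed directly
const b = boxTensor.dataSync();
const startPoint = [b[0], b[1]];
const endPoint = [b[2], b[3]];
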
@@ -12,11 +12,11 @@ const PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;

 // The Pipeline coordinates between the bounding box and skeleton models.
 class HandPipeline {
-  constructor(boundingBoxDetector, meshDetector, config) {
+  constructor(boundingBoxDetector, detector, config) {
     this.regionsOfInterest = [];
     this.runsWithoutHandDetector = 0;
     this.boundingBoxDetector = boundingBoxDetector;
-    this.meshDetector = meshDetector;
+    this.detector = detector;
     this.meshWidth = config.inputSize;
     this.meshHeight = config.inputSize;
     this.enlargeFactor = config.enlargeFactor;
@@ -93,7 +93,7 @@ class HandPipeline {
     const hands = [];
     if (!this.regionsOfInterest) return hands;
     for (const i in this.regionsOfInterest) {
-      const currentBox = this.regionsOfInterest[i][0];
+      const currentBox = this.regionsOfInterest[i] ? this.regionsOfInterest[i][0] : null;
       if (!currentBox) return hands;
       const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
       const palmCenter = bounding.getBoxCenter(currentBox);
@@ -105,18 +105,18 @@ class HandPipeline {
       const handImage = croppedInput.div(255);
       croppedInput.dispose();
       rotatedImage.dispose();
-      const prediction = this.meshDetector.predict(handImage);
-      const [flag, keypoints] = prediction;
+      const prediction = this.detector.predict(handImage);
+      const [confidence, keypoints] = prediction;
       handImage.dispose();
-      const flagValue = flag.dataSync()[0];
-      flag.dispose();
-      if (flagValue < config.minConfidence) {
+      const confidenceVal = confidence.dataSync()[0];
+      confidence.dispose();
+      if (confidenceVal < config.minConfidence) {
         keypoints.dispose();
         this.regionsOfInterest[i] = [];
         return hands;
       }
       const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
-      const rawCoords = await keypointsReshaped.array();
+      const rawCoords = keypointsReshaped.arraySync();
       keypoints.dispose();
       keypointsReshaped.dispose();
       const coords = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
@@ -124,7 +124,7 @@ class HandPipeline {
       this.updateRegionsOfInterest(nextBoundingBox, false /* force replace */, i);
       const result = {
         landmarks: coords,
-        confidence: flagValue,
+        confidence: confidenceVal,
         box: {
           topLeft: nextBoundingBox.startPoint,
           bottomRight: nextBoundingBox.endPoint,

src/human.js (140 changed lines)
@@ -61,10 +61,13 @@ class Human {
     this.version = app.version;
     this.defaults = defaults;
     this.config = defaults;
-    this.fx = (tf.ENV.flags.IS_BROWSER && (typeof document !== 'undefined')) ? new fxImage.Canvas() : null;
+    this.fx = null;
     this.state = 'idle';
     this.numTensors = 0;
     this.analyzeMemoryLeaks = false;
+    // internal temp canvases
+    this.inCanvas = null;
+    this.outCanvas = null;
     // object that contains all initialized models
     this.models = {
       facemesh: null,
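
The constructor no longer instantiates the filter canvas up front; this.fx starts as null and is created lazily inside tfImage() once the cached output canvas exists, so sessions that never enable filters never pay for the extra GL-backed canvas. A sketch of the lazy-init pattern, simplified from the hunk that follows:

// create the filter object only on first use, bound to the reused canvas
if (!this.fx) this.fx = new fxImage.Canvas({ canvas: this.outCanvas });
this.fx.reset();
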
@@ -160,56 +163,62 @@ class Human {
   }

   tfImage(input) {
-    // let imageData;
-    let filtered;
-    const originalWidth = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));
-    const originalHeight = input.naturalHeight || input.videoHeight || input.height || (input.shape && (input.shape[2] > 0));
-    let targetWidth = originalWidth;
-    let targetHeight = originalHeight;
-    if (this.fx && this.config.filter.enabled && !(input instanceof tf.Tensor)) {
-      if (this.config.filter.width > 0) targetWidth = this.config.filter.width;
-      else if (this.config.filter.height > 0) targetWidth = originalWidth * (this.config.filter.height / originalHeight);
-      if (this.config.filter.height > 0) targetHeight = this.config.filter.height;
-      else if (this.config.filter.width > 0) targetHeight = originalHeight * (this.config.filter.width / originalWidth);
-      const offscreenCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');
-      if (offscreenCanvas.width !== targetWidth) offscreenCanvas.width = targetWidth;
-      if (offscreenCanvas.height !== targetHeight) offscreenCanvas.height = targetHeight;
-      const ctx = offscreenCanvas.getContext('2d');
-      if (input instanceof ImageData) ctx.putImageData(input, 0, 0);
-      else ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, offscreenCanvas.width, offscreenCanvas.height);
-      this.fx.reset();
-      this.fx.addFilter('brightness', this.config.filter.brightness); // must have at least one filter enabled
-      if (this.config.filter.contrast !== 0) this.fx.addFilter('contrast', this.config.filter.contrast);
-      if (this.config.filter.sharpness !== 0) this.fx.addFilter('sharpen', this.config.filter.sharpness);
-      if (this.config.filter.blur !== 0) this.fx.addFilter('blur', this.config.filter.blur);
-      if (this.config.filter.saturation !== 0) this.fx.addFilter('saturation', this.config.filter.saturation);
-      if (this.config.filter.hue !== 0) this.fx.addFilter('hue', this.config.filter.hue);
-      if (this.config.filter.negative) this.fx.addFilter('negative');
-      if (this.config.filter.sepia) this.fx.addFilter('sepia');
-      if (this.config.filter.vintage) this.fx.addFilter('brownie');
-      if (this.config.filter.sepia) this.fx.addFilter('sepia');
-      if (this.config.filter.kodachrome) this.fx.addFilter('kodachrome');
-      if (this.config.filter.technicolor) this.fx.addFilter('technicolor');
-      if (this.config.filter.polaroid) this.fx.addFilter('polaroid');
-      if (this.config.filter.pixelate !== 0) this.fx.addFilter('pixelate', this.config.filter.pixelate);
-      filtered = this.fx.apply(offscreenCanvas);
-    }
     let tensor;
     if (input instanceof tf.Tensor) {
       tensor = tf.clone(input);
     } else {
-      const canvas = filtered || input;
+      const originalWidth = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));
+      const originalHeight = input.naturalHeight || input.videoHeight || input.height || (input.shape && (input.shape[2] > 0));
+      let targetWidth = originalWidth;
+      let targetHeight = originalHeight;
+      if (this.config.filter.width > 0) targetWidth = this.config.filter.width;
+      else if (this.config.filter.height > 0) targetWidth = originalWidth * (this.config.filter.height / originalHeight);
+      if (this.config.filter.height > 0) targetHeight = this.config.filter.height;
+      else if (this.config.filter.width > 0) targetHeight = originalHeight * (this.config.filter.width / originalWidth);
+      if (!this.inCanvas || (this.inCanvas.width !== originalWidth) || (this.inCanvas.height !== originalHeight)) {
+        this.inCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');
+        if (this.inCanvas.width !== targetWidth) this.inCanvas.width = targetWidth;
+        if (this.inCanvas.height !== targetHeight) this.inCanvas.height = targetHeight;
+      }
+      const ctx = this.inCanvas.getContext('2d');
+      if (input instanceof ImageData) ctx.putImageData(input, 0, 0);
+      else ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, this.inCanvas.width, this.inCanvas.height);
+      if (this.config.filter.enabled) {
+        if (!this.outCanvas || (this.inCanvas.width !== this.outCanvas.width) || (this.inCanvas.height !== this.outCanvas.height)) {
+          this.outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(this.inCanvas.width, this.inCanvas.height) : document.createElement('canvas');
+          if (this.outCanvas.width !== this.inCanvas.width) this.outCanvas.width = this.inCanvas.width;
+          if (this.outCanvas.height !== this.inCanvas.height) this.outCanvas.height = this.inCanvas.height;
+        }
+        if (!this.fx) this.fx = (tf.ENV.flags.IS_BROWSER && (typeof document !== 'undefined')) ? new fxImage.Canvas({ canvas: this.outCanvas }) : null;
+        this.fx.reset();
+        this.fx.addFilter('brightness', this.config.filter.brightness); // must have at least one filter enabled
+        if (this.config.filter.contrast !== 0) this.fx.addFilter('contrast', this.config.filter.contrast);
+        if (this.config.filter.sharpness !== 0) this.fx.addFilter('sharpen', this.config.filter.sharpness);
+        if (this.config.filter.blur !== 0) this.fx.addFilter('blur', this.config.filter.blur);
+        if (this.config.filter.saturation !== 0) this.fx.addFilter('saturation', this.config.filter.saturation);
+        if (this.config.filter.hue !== 0) this.fx.addFilter('hue', this.config.filter.hue);
+        if (this.config.filter.negative) this.fx.addFilter('negative');
+        if (this.config.filter.sepia) this.fx.addFilter('sepia');
+        if (this.config.filter.vintage) this.fx.addFilter('brownie');
+        if (this.config.filter.sepia) this.fx.addFilter('sepia');
+        if (this.config.filter.kodachrome) this.fx.addFilter('kodachrome');
+        if (this.config.filter.technicolor) this.fx.addFilter('technicolor');
+        if (this.config.filter.polaroid) this.fx.addFilter('polaroid');
+        if (this.config.filter.pixelate !== 0) this.fx.addFilter('pixelate', this.config.filter.pixelate);
+        this.fx.apply(this.inCanvas);
+      }
+      if (!this.outCanvas) this.outCanvas = this.inCanvas;
       let pixels;
-      if ((this.config.backend === 'webgl') || (canvas instanceof ImageData)) {
+      if ((this.config.backend === 'webgl') || (this.outCanvas instanceof ImageData)) {
         // tf kernel-optimized method to get imagedata, also if input is imagedata, just use it
-        pixels = tf.browser.fromPixels(canvas);
+        pixels = tf.browser.fromPixels(this.outCanvas);
       } else {
         // cpu and wasm kernel does not implement efficient fromPixels method nor we can use canvas as-is, so we do a silly one more canvas
         const tempCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');
         tempCanvas.width = targetWidth;
         tempCanvas.height = targetHeight;
         const tempCtx = tempCanvas.getContext('2d');
-        tempCtx.drawImage(canvas, 0, 0);
+        tempCtx.drawImage(this.outCanvas, 0, 0);
         const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
         pixels = tf.browser.fromPixels(data);
       }
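
The tfImage() rewrite moves all canvas work behind the two cached members, this.inCanvas and this.outCanvas, so a video pipeline reuses the same canvases frame after frame instead of allocating a fresh OffscreenCanvas on every call. A simplified sketch of the reuse pattern (dimension names as in the hunk above):

// recreate the cached canvas only when the source dimensions change
if (!this.inCanvas || this.inCanvas.width !== targetWidth || this.inCanvas.height !== targetHeight) {
  this.inCanvas = (typeof OffscreenCanvas !== 'undefined')
    ? new OffscreenCanvas(targetWidth, targetHeight)
    : document.createElement('canvas');
}
// every frame just redraws into the existing canvas
this.inCanvas.getContext('2d').drawImage(input, 0, 0, this.inCanvas.width, this.inCanvas.height);
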
@@ -218,7 +227,7 @@ class Human {
       pixels.dispose();
       casted.dispose();
     }
-    return { tensor, canvas: this.config.filter.return ? filtered : null };
+    return { tensor, canvas: this.config.filter.return ? this.outCanvas : null };
   }

   async detect(input, userConfig = {}) {
@@ -239,6 +248,11 @@ class Human {

     // eslint-disable-next-line no-async-promise-executor
     return new Promise(async (resolve) => {
+      let poseRes;
+      let handRes;
+      let ssrRes;
+      let emotionRes;

       const timeStart = now();

       // configure backend
@@ -270,20 +284,30 @@ class Human {
       const imageTensor = image.tensor;

       // run posenet
-      this.state = 'run:body';
-      timeStamp = now();
-      this.analyze('Start PoseNet');
-      const poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
-      this.analyze('End PoseNet:');
-      perf.body = Math.trunc(now() - timeStamp);
+      if (this.config.async) {
+        poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
+      } else {
+        this.state = 'run:body';
+        timeStamp = now();
+        this.analyze('Start PoseNet');
+        poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
+        this.analyze('End PoseNet:');
+        perf.body = Math.trunc(now() - timeStamp);
+      }

       // run handpose
-      this.state = 'run:hand';
-      timeStamp = now();
-      this.analyze('Start HandPose:');
-      const handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
-      this.analyze('End HandPose:');
-      perf.hand = Math.trunc(now() - timeStamp);
+      if (this.config.async) {
+        handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
+      } else {
+        this.state = 'run:hand';
+        timeStamp = now();
+        this.analyze('Start HandPose:');
+        handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
+        this.analyze('End HandPose:');
+        perf.hand = Math.trunc(now() - timeStamp);
+      }

+      if (this.config.async) [poseRes, handRes] = await Promise.all([poseRes, handRes]);

       // run facemesh, includes blazeface and iris
       const faceRes = [];
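
When config.async is set, the estimate calls are started without await so both models run concurrently, and the pending promises are only resolved together afterwards; the per-model timings are skipped because the runs overlap. A minimal sketch of the pattern, with detectPose and detectHand as illustrative promise-returning calls:

// start both detections without awaiting: work begins immediately
let poseRes = detectPose(input);
let handRes = detectHand(input);
// resolve both at once; total latency is the slower of the two, not the sum
[poseRes, handRes] = await Promise.all([poseRes, handRes]);
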
@@ -302,12 +326,12 @@ class Human {
         // run ssr-net age & gender, inherits face from blazeface
         this.state = 'run:agegender';
         timeStamp = now();
-        const ssrData = (this.config.face.age.enabled || this.config.face.gender.enabled) ? await ssrnet.predict(face.image, this.config) : {};
+        ssrRes = (this.config.face.age.enabled || this.config.face.gender.enabled) ? await ssrnet.predict(face.image, this.config) : {};
         perf.agegender = Math.trunc(now() - timeStamp);
         // run emotion, inherits face from blazeface
         this.state = 'run:emotion';
         timeStamp = now();
-        const emotionData = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
+        emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
         perf.emotion = Math.trunc(now() - timeStamp);

         // dont need face anymore
@@ -322,10 +346,10 @@ class Human {
           box: face.box,
           mesh: face.mesh,
           annotations: face.annotations,
-          age: ssrData.age,
-          gender: ssrData.gender,
-          agConfidence: ssrData.confidence,
-          emotion: emotionData,
+          age: ssrRes.age,
+          gender: ssrRes.gender,
+          agConfidence: ssrRes.confidence,
+          emotion: emotionRes,
           iris: (iris !== 0) ? Math.trunc(100 * 11.7 /* human iris size in mm */ / iris) / 100 : 0,
         });
         this.analyze('End FaceMesh:');

@@ -30,7 +30,7 @@ async function predict(image, config) {
   let genderT;
   const obj = {};

-  if (!config.profile) {
+  if (!config.profile || config.async) {
     if (config.face.age.enabled) promises.push(ageT = models.age.predict(enhance));
     if (config.face.gender.enabled) promises.push(genderT = models.gender.predict(enhance));
     await Promise.all(promises);
@@ -46,12 +46,12 @@ async function predict(image, config) {
   }

   if (ageT) {
-    const data = await ageT.data();
+    const data = ageT.dataSync();
     obj.age = Math.trunc(10 * data[0]) / 10;
     tf.dispose(ageT);
   }
   if (genderT) {
-    const data = await genderT.data();
+    const data = genderT.dataSync();
     const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
     if (confidence > config.face.gender.minConfidence) {
       obj.gender = data[0] <= 0.5 ? 'female' : 'male';
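
The gender readout maps the model's single sigmoid output to a label plus confidence: distance from the 0.5 decision boundary is scaled by 1.9 and truncated to two decimals, so confidence spans roughly 0 (output at 0.5) to 0.95 (output at 0 or 1). A worked example with an illustrative output value:

// data[0] = 0.1 -> |1.9 * 100 * (0.1 - 0.5)| = 76 -> confidence 0.76
const data = [0.1]; // hypothetical sigmoid output
const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100; // 0.76
const gender = data[0] <= 0.5 ? 'female' : 'male'; // 'female'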