fix bug in async ops and change imports

pull/50/head
Vladimir Mandic 2020-11-10 08:57:39 -05:00
parent 42e17606d6
commit 5807fb1bfa
36 changed files with 1638 additions and 1636 deletions
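The import change centralizes TensorFlow.js access in a new src/tf.js module: instead of each source file cherry-picking named ops from @tensorflow/tfjs, every module now imports one shared tf namespace and calls ops as tf.slice, tf.mul, and so on. A minimal sketch of that module, reconstructed from the bundled output further down; the exact export list in the repository may differ:

// src/tf.js: single entry point for TensorFlow.js (sketch; export list is an assumption)
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js';
import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';

export { tf, setWasmPaths };
export const loadGraphModel = tf.loadGraphModel; // alias visible in the bundle diff below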

demo/browser.js

@@ -2,10 +2,10 @@ import Human from '../dist/human.esm.js';
 import draw from './draw.js';
 import Menu from './menu.js';
-const human = new Human();
 const userConfig = {}; // add any user configuration overrides
+const human = new Human(userConfig);
 // ui options
 const ui = {
   baseColor: 'rgba(173, 216, 230, 0.3)', // 'lightblue' with light alpha channel
@@ -134,7 +134,7 @@ async function setupCamera() {
   const constraints = {
     audio: false,
     video: {
-      facingMode: (ui.facing ? 'user' : 'environment'),
+      facingMode: ui.facing ? 'user' : 'environment',
      resizeMode: ui.crop ? 'crop-and-scale' : 'none',
      width: { ideal: window.innerWidth },
      height: { ideal: window.innerHeight },
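The only functional change in the demo is ordering: the overrides object now exists before Human is constructed and is passed to the constructor, which merges it over the defaults via mergeDeep (visible in the src/human.js diff below). A hedged usage sketch; the override shown is hypothetical, since the demo itself ships an empty object:

const userConfig = { backend: 'wasm' }; // hypothetical override of a default config key
const human = new Human(userConfig);    // constructor runs mergeDeep(config_default, userConfig)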

dist/demo-browser-index.js: file diff suppressed because it is too large

dist/demo-browser-index.js.map: file diff suppressed because one or more lines are too long

dist/demo-browser-index.json

@@ -1,7 +1,7 @@
 {
   "inputs": {
     "demo/browser.js": {
-      "bytes": 18516,
+      "bytes": 18524,
       "imports": [
         {
           "path": "dist/human.esm.js"
@@ -23,7 +23,7 @@
       "imports": []
     },
     "dist/human.esm.js": {
-      "bytes": 3442260,
+      "bytes": 3441681,
       "imports": []
     }
   },
@@ -31,13 +31,13 @@
     "dist/demo-browser-index.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 5419827
+      "bytes": 5406858
     },
     "dist/demo-browser-index.js": {
       "imports": [],
       "inputs": {
         "dist/human.esm.js": {
-          "bytesInOutput": 3431560
+          "bytesInOutput": 3430970
         },
         "demo/draw.js": {
           "bytesInOutput": 8898
@@ -46,10 +46,10 @@
           "bytesInOutput": 13813
         },
         "demo/browser.js": {
-          "bytesInOutput": 16215
+          "bytesInOutput": 16241
         }
       },
-      "bytes": 3470608
+      "bytes": 3470044
     }
   }
 }
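This JSON has the shape of an esbuild metafile: inputs records each source file's byte size and import edges, and outputs records how many bytes each input contributes to a bundle (bytesInOutput). Assuming that format, a small sketch for diffing per-input sizes between two builds (file paths are assumptions):

// compare per-input sizes of two esbuild-style metafiles (paths are assumptions)
import { readFileSync } from 'fs';
const before = JSON.parse(readFileSync('dist/demo-browser-index.prev.json', 'utf8'));
const after = JSON.parse(readFileSync('dist/demo-browser-index.json', 'utf8'));
for (const [name, meta] of Object.entries(after.inputs)) {
  const delta = meta.bytes - (before.inputs[name]?.bytes ?? 0); // new or grown inputs show positive deltas
  if (delta) console.log(`${name}: ${delta > 0 ? '+' : ''}${delta} bytes`);
}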

dist/human.esm-nobundle.js

@@ -28,21 +28,6 @@ var __toModule = (module) => {
 };
 // src/face/blazeface.js
-import {
-  add,
-  concat,
-  concat2d,
-  div,
-  image as image3,
-  loadGraphModel,
-  mul,
-  sigmoid,
-  slice,
-  sub,
-  tensor1d,
-  tensor2d,
-  tidy
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 var require_blazeface = __commonJS((exports) => {
   const NUM_LANDMARKS = 6;
   function generateAnchors(inputSize) {
@@ -72,31 +57,31 @@ var require_blazeface = __commonJS((exports) => {
   };
   const createBox = (startEndTensor) => ({
     startEndTensor,
-    startPoint: slice(startEndTensor, [0, 0], [-1, 2]),
-    endPoint: slice(startEndTensor, [0, 2], [-1, 2])
+    startPoint: tf.slice(startEndTensor, [0, 0], [-1, 2]),
+    endPoint: tf.slice(startEndTensor, [0, 2], [-1, 2])
   });
   const scaleBox = (box, factors) => {
-    const starts = mul(box.startPoint, factors);
-    const ends = mul(box.endPoint, factors);
-    const newCoordinates = concat2d([starts, ends], 1);
+    const starts = tf.mul(box.startPoint, factors);
+    const ends = tf.mul(box.endPoint, factors);
+    const newCoordinates = tf.concat2d([starts, ends], 1);
     return createBox(newCoordinates);
   };
   function decodeBounds(boxOutputs, anchors, inputSize) {
-    const boxStarts = slice(boxOutputs, [0, 1], [-1, 2]);
-    const centers = add(boxStarts, anchors);
-    const boxSizes = slice(boxOutputs, [0, 3], [-1, 2]);
-    const boxSizesNormalized = div(boxSizes, inputSize);
-    const centersNormalized = div(centers, inputSize);
-    const halfBoxSize = div(boxSizesNormalized, 2);
-    const starts = sub(centersNormalized, halfBoxSize);
-    const ends = add(centersNormalized, halfBoxSize);
-    const startNormalized = mul(starts, inputSize);
-    const endNormalized = mul(ends, inputSize);
+    const boxStarts = tf.slice(boxOutputs, [0, 1], [-1, 2]);
+    const centers = tf.add(boxStarts, anchors);
+    const boxSizes = tf.slice(boxOutputs, [0, 3], [-1, 2]);
+    const boxSizesNormalized = tf.div(boxSizes, inputSize);
+    const centersNormalized = tf.div(centers, inputSize);
+    const halfBoxSize = tf.div(boxSizesNormalized, 2);
+    const starts = tf.sub(centersNormalized, halfBoxSize);
+    const ends = tf.add(centersNormalized, halfBoxSize);
+    const startNormalized = tf.mul(starts, inputSize);
+    const endNormalized = tf.mul(ends, inputSize);
     const concatAxis = 1;
-    return concat2d([startNormalized, endNormalized], concatAxis);
+    return tf.concat2d([startNormalized, endNormalized], concatAxis);
   }
   function scaleBoxFromPrediction(face2, scaleFactor) {
-    return tidy(() => {
+    return tf.tidy(() => {
       const box = face2["box"] ? face2["box"] : face2;
       return scaleBox(box, scaleFactor).startEndTensor.squeeze();
     });
@@ -107,37 +92,37 @@ var require_blazeface = __commonJS((exports) => {
     this.width = config2.detector.inputSize;
     this.height = config2.detector.inputSize;
     this.anchorsData = generateAnchors(config2.detector.inputSize);
-    this.anchors = tensor2d(this.anchorsData);
-    this.inputSize = tensor1d([this.width, this.height]);
+    this.anchors = tf.tensor2d(this.anchorsData);
+    this.inputSize = tf.tensor1d([this.width, this.height]);
     this.config = config2;
     this.scaleFaces = 0.8;
   }
   async getBoundingBoxes(inputImage) {
     if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
       return null;
-    const [detectedOutputs, boxes, scores] = tidy(() => {
+    const [detectedOutputs, boxes, scores] = tf.tidy(() => {
       const resizedImage = inputImage.resizeBilinear([this.width, this.height]);
-      const normalizedImage = sub(resizedImage.div(127.5), 1);
+      const normalizedImage = tf.sub(resizedImage.div(127.5), 1);
       const batchedPrediction = this.blazeFaceModel.predict(normalizedImage);
       let prediction;
       if (Array.isArray(batchedPrediction)) {
         const sorted = batchedPrediction.sort((a, b) => a.size - b.size);
-        const concat384 = concat([sorted[0], sorted[2]], 2);
-        const concat512 = concat([sorted[1], sorted[3]], 2);
-        const concat2 = concat([concat512, concat384], 1);
-        prediction = concat2.squeeze(0);
+        const concat384 = tf.concat([sorted[0], sorted[2]], 2);
+        const concat512 = tf.concat([sorted[1], sorted[3]], 2);
+        const concat = tf.concat([concat512, concat384], 1);
+        prediction = concat.squeeze(0);
       } else {
         prediction = batchedPrediction.squeeze();
       }
       const decodedBounds = decodeBounds(prediction, this.anchors, this.inputSize);
-      const logits = slice(prediction, [0, 0], [-1, 1]);
-      const scoresOut = sigmoid(logits).squeeze();
+      const logits = tf.slice(prediction, [0, 0], [-1, 1]);
+      const scoresOut = tf.sigmoid(logits).squeeze();
       return [prediction, decodedBounds, scoresOut];
     });
-    const boxIndicesTensor = await image3.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold);
+    const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold);
     const boxIndices = boxIndicesTensor.arraySync();
     boxIndicesTensor.dispose();
-    const boundingBoxesMap = boxIndices.map((boxIndex) => slice(boxes, [boxIndex, 0], [1, -1]));
+    const boundingBoxesMap = boxIndices.map((boxIndex) => tf.slice(boxes, [boxIndex, 0], [1, -1]));
     const boundingBoxes = boundingBoxesMap.map((boundingBox) => {
       const vals = boundingBox.arraySync();
       boundingBox.dispose();
@@ -151,7 +136,7 @@ var require_blazeface = __commonJS((exports) => {
       if (confidence > this.config.detector.minConfidence) {
         const box = createBox(boundingBoxes[i]);
         const anchor = this.anchorsData[boxIndex];
-        const landmarks = tidy(() => slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1]).squeeze().reshape([NUM_LANDMARKS, -1]));
+        const landmarks = tf.tidy(() => tf.slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1]).squeeze().reshape([NUM_LANDMARKS, -1]));
         annotatedBoxes.push({box, landmarks, anchor, confidence});
       }
     }
@@ -291,9 +276,6 @@ var require_keypoints = __commonJS((exports) => {
 });
 // src/face/box.js
-import {
-  image as image3
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 var require_box = __commonJS((exports) => {
   function scaleBoxCoordinates2(box, factor) {
     const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];
@@ -315,16 +297,16 @@ var require_box = __commonJS((exports) => {
     ];
   }
   exports.getBoxCenter = getBoxCenter2;
-  function cutBoxFromImageAndResize2(box, image4, cropSize) {
-    const h = image4.shape[1];
-    const w = image4.shape[2];
+  function cutBoxFromImageAndResize2(box, image2, cropSize) {
+    const h = image2.shape[1];
+    const w = image2.shape[2];
     const boxes = [[
       box.startPoint[1] / h,
       box.startPoint[0] / w,
       box.endPoint[1] / h,
       box.endPoint[0] / w
     ]];
-    return image3.cropAndResize(image4, boxes, [0], cropSize);
+    return tf.image.cropAndResize(image2, boxes, [0], cropSize);
   }
   exports.cutBoxFromImageAndResize = cutBoxFromImageAndResize2;
   function enlargeBox2(box, factor = 1.5) {
@@ -432,14 +414,6 @@ var require_util = __commonJS((exports) => {
 });
 // src/face/facepipeline.js
-import {
-  concat,
-  dispose,
-  image as image3,
-  reshape,
-  tensor2d,
-  tidy
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 var require_facepipeline = __commonJS((exports) => {
   const bounding = __toModule(require_box());
   const keypoints = __toModule(require_keypoints());
@@ -519,14 +493,14 @@ var require_facepipeline = __commonJS((exports) => {
   getEyeBox(rawCoords, face2, eyeInnerCornerIndex, eyeOuterCornerIndex, flip = false) {
     const box = bounding.squarifyBox(bounding.enlargeBox(this.calculateLandmarksBoundingBox([rawCoords[eyeInnerCornerIndex], rawCoords[eyeOuterCornerIndex]]), this.irisEnlarge));
     const boxSize = bounding.getBoxSize(box);
-    let crop = image3.cropAndResize(face2, [[
+    let crop = tf.image.cropAndResize(face2, [[
       box.startPoint[1] / this.meshHeight,
       box.startPoint[0] / this.meshWidth,
       box.endPoint[1] / this.meshHeight,
       box.endPoint[0] / this.meshWidth
     ]], [0], [this.irisSize, this.irisSize]);
     if (flip) {
-      crop = image3.flipLeftRight(crop);
+      crop = tf.image.flipLeftRight(crop);
     }
     return {box, boxSize, crop};
   }
@@ -598,7 +572,7 @@ var require_facepipeline = __commonJS((exports) => {
         prediction.landmarks.dispose();
       });
     }
-    let results = tidy(() => this.storedBoxes.map((box, i) => {
+    let results = tf.tidy(() => this.storedBoxes.map((box, i) => {
       let angle = 0;
       const boxLandmarksFromMeshModel = box.landmarks.length >= LANDMARKS_COUNT;
       let [indexOfMouth, indexOfForehead] = MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES;
@@ -611,7 +585,7 @@ var require_facepipeline = __commonJS((exports) => {
       let rotatedImage = input;
       let rotationMatrix = util.IDENTITY_MATRIX;
       if (angle !== 0) {
-        rotatedImage = image3.rotateWithOffset(input, angle, 0, faceCenterNormalized);
+        rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
         rotationMatrix = util.buildRotationMatrix(-angle, faceCenter);
       }
       const boxCPU = {startPoint: box.startPoint, endPoint: box.endPoint};
@@ -633,12 +607,12 @@ var require_facepipeline = __commonJS((exports) => {
         coords.dispose();
         return null;
       }
-      const coordsReshaped = reshape(coords, [-1, 3]);
+      const coordsReshaped = tf.reshape(coords, [-1, 3]);
       let rawCoords = coordsReshaped.arraySync();
       if (config2.iris.enabled) {
         const {box: leftEyeBox, boxSize: leftEyeBoxSize, crop: leftEyeCrop} = this.getEyeBox(rawCoords, face2, LEFT_EYE_BOUNDS[0], LEFT_EYE_BOUNDS[1], true);
         const {box: rightEyeBox, boxSize: rightEyeBoxSize, crop: rightEyeCrop} = this.getEyeBox(rawCoords, face2, RIGHT_EYE_BOUNDS[0], RIGHT_EYE_BOUNDS[1]);
-        const eyePredictions = this.irisModel.predict(concat([leftEyeCrop, rightEyeCrop]));
+        const eyePredictions = this.irisModel.predict(tf.concat([leftEyeCrop, rightEyeCrop]));
         const eyePredictionsData = eyePredictions.dataSync();
         eyePredictions.dispose();
         const leftEyeData = eyePredictionsData.slice(0, IRIS_NUM_COORDINATES * 3);
@@ -659,9 +633,9 @@ var require_facepipeline = __commonJS((exports) => {
         rawCoords = rawCoords.concat(adjustedLeftIrisCoords).concat(adjustedRightIrisCoords);
       }
       const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
-      dispose(rawCoords);
+      tf.dispose(rawCoords);
       const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));
-      const transformedCoords = tensor2d(transformedCoordsData);
+      const transformedCoords = tf.tensor2d(transformedCoordsData);
       const prediction = {
         coords: transformedCoords,
         box: landmarksBox,
@@ -1162,10 +1136,6 @@ var require_uvcoords = __commonJS((exports) => {
 });
 // src/face/facemesh.js
-import {
-  clone,
-  loadGraphModel
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 var require_facemesh = __commonJS((exports) => {
   const blazeface = __toModule(require_blazeface());
   const keypoints = __toModule(require_keypoints());
@@ -1199,7 +1169,7 @@
         box: prediction.box ? [prediction.box.startPoint[0], prediction.box.startPoint[1], prediction.box.endPoint[0] - prediction.box.startPoint[0], prediction.box.endPoint[1] - prediction.box.startPoint[1]] : 0,
         mesh,
         annotations,
-        image: prediction.image ? clone(prediction.image) : null
+        image: prediction.image ? tf.clone(prediction.image) : null
       });
       if (prediction.coords)
         prediction.coords.dispose();
@@ -1254,13 +1224,6 @@ var require_profile = __commonJS((exports) => {
 });
 // src/age/age.js
-import {
-  dispose,
-  image as image3,
-  loadGraphModel,
-  mul,
-  profile as profile3
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 var require_age = __commonJS((exports) => {
   const profile2 = __toModule(require_profile());
   const models = {};
@@ -1274,7 +1237,7 @@
     }
     return models.age;
   }
-  async function predict(image4, config2) {
+  async function predict(image2, config2) {
     if (frame < config2.face.age.skipFrames && last.age && last.age > 0) {
       frame += 1;
       return last;
@@ -1282,21 +1245,21 @@
     frame = 0;
     return new Promise(async (resolve) => {
       const box = [[
-        image4.shape[1] * zoom[0] / image4.shape[1],
-        image4.shape[2] * zoom[1] / image4.shape[2],
-        (image4.shape[1] - image4.shape[1] * zoom[0]) / image4.shape[1],
-        (image4.shape[2] - image4.shape[2] * zoom[1]) / image4.shape[2]
+        image2.shape[1] * zoom[0] / image2.shape[1],
+        image2.shape[2] * zoom[1] / image2.shape[2],
+        (image2.shape[1] - image2.shape[1] * zoom[0]) / image2.shape[1],
+        (image2.shape[2] - image2.shape[2] * zoom[1]) / image2.shape[2]
       ]];
-      const resize = image3.cropAndResize(image4, box, [0], [config2.face.age.inputSize, config2.face.age.inputSize]);
-      const enhance = mul(resize, [255]);
-      dispose(resize);
+      const resize = tf.image.cropAndResize(image2, box, [0], [config2.face.age.inputSize, config2.face.age.inputSize]);
+      const enhance = tf.mul(resize, [255]);
+      tf.dispose(resize);
       let ageT;
       const obj = {};
       if (!config2.profile) {
         if (config2.face.age.enabled)
           ageT = await models.age.predict(enhance);
       } else {
-        const profileAge = config2.face.age.enabled ? await profile3(() => models.age.predict(enhance)) : {};
+        const profileAge = config2.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};
         ageT = profileAge.result.clone();
         profileAge.result.dispose();
         profile2.run("age", profileAge);
@@ -1316,16 +1279,6 @@
 });
 // src/gender/gender.js
-import {
-  addN,
-  dispose,
-  image as image3,
-  loadGraphModel,
-  mul,
-  profile as profile3,
-  split,
-  tidy
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 var require_gender = __commonJS((exports) => {
   const profile2 = __toModule(require_profile());
   const models = {};
@@ -1342,7 +1295,7 @@
     }
     return models.gender;
   }
-  async function predict(image4, config2) {
+  async function predict(image2, config2) {
     if (frame < config2.face.gender.skipFrames && last.gender !== "") {
       frame += 1;
       return last;
@@ -1350,33 +1303,33 @@
     frame = 0;
     return new Promise(async (resolve) => {
       const box = [[
-        image4.shape[1] * zoom[0] / image4.shape[1],
-        image4.shape[2] * zoom[1] / image4.shape[2],
-        (image4.shape[1] - image4.shape[1] * zoom[0]) / image4.shape[1],
-        (image4.shape[2] - image4.shape[2] * zoom[1]) / image4.shape[2]
+        image2.shape[1] * zoom[0] / image2.shape[1],
+        image2.shape[2] * zoom[1] / image2.shape[2],
+        (image2.shape[1] - image2.shape[1] * zoom[0]) / image2.shape[1],
+        (image2.shape[2] - image2.shape[2] * zoom[1]) / image2.shape[2]
       ]];
-      const resize = image3.cropAndResize(image4, box, [0], [config2.face.gender.inputSize, config2.face.gender.inputSize]);
+      const resize = tf.image.cropAndResize(image2, box, [0], [config2.face.gender.inputSize, config2.face.gender.inputSize]);
       let enhance;
       if (alternative) {
-        enhance = tidy(() => {
-          const [red, green, blue] = split(resize, 3, 3);
-          const redNorm = mul(red, rgb[0]);
-          const greenNorm = mul(green, rgb[1]);
-          const blueNorm = mul(blue, rgb[2]);
-          const grayscale = addN([redNorm, greenNorm, blueNorm]);
+        enhance = tf.tidy(() => {
+          const [red, green, blue] = tf.split(resize, 3, 3);
+          const redNorm = tf.mul(red, rgb[0]);
+          const greenNorm = tf.mul(green, rgb[1]);
+          const blueNorm = tf.mul(blue, rgb[2]);
+          const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
           return grayscale.sub(0.5).mul(2);
         });
       } else {
-        enhance = mul(resize, [255]);
+        enhance = tf.mul(resize, [255]);
       }
-      dispose(resize);
+      tf.dispose(resize);
       let genderT;
       const obj = {};
       if (!config2.profile) {
         if (config2.face.gender.enabled)
           genderT = await models.gender.predict(enhance);
       } else {
-        const profileGender = config2.face.gender.enabled ? await profile3(() => models.gender.predict(enhance)) : {};
+        const profileGender = config2.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};
         genderT = profileGender.result.clone();
         profileGender.result.dispose();
         profile2.run("gender", profileGender);
@@ -1408,16 +1361,6 @@
 });
 // src/emotion/emotion.js
-import {
-  addN,
-  dispose,
-  image as image3,
-  loadGraphModel,
-  mul,
-  profile as profile3,
-  split,
-  tidy
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 var require_emotion = __commonJS((exports) => {
   const profile2 = __toModule(require_profile());
   const annotations = ["angry", "disgust", "fear", "happy", "sad", "surpise", "neutral"];
@@ -1434,7 +1377,7 @@
     }
     return models.emotion;
   }
-  async function predict(image4, config2) {
+  async function predict(image2, config2) {
     if (frame < config2.face.emotion.skipFrames && last.length > 0) {
       frame += 1;
       return last;
@@ -1442,25 +1385,25 @@
     frame = 0;
     return new Promise(async (resolve) => {
       const box = [[
-        image4.shape[1] * zoom[0] / image4.shape[1],
-        image4.shape[2] * zoom[1] / image4.shape[2],
-        (image4.shape[1] - image4.shape[1] * zoom[0]) / image4.shape[1],
-        (image4.shape[2] - image4.shape[2] * zoom[1]) / image4.shape[2]
+        image2.shape[1] * zoom[0] / image2.shape[1],
+        image2.shape[2] * zoom[1] / image2.shape[2],
+        (image2.shape[1] - image2.shape[1] * zoom[0]) / image2.shape[1],
+        (image2.shape[2] - image2.shape[2] * zoom[1]) / image2.shape[2]
       ]];
-      const resize = image3.cropAndResize(image4, box, [0], [config2.face.emotion.inputSize, config2.face.emotion.inputSize]);
-      const [red, green, blue] = split(resize, 3, 3);
+      const resize = tf.image.cropAndResize(image2, box, [0], [config2.face.emotion.inputSize, config2.face.emotion.inputSize]);
+      const [red, green, blue] = tf.split(resize, 3, 3);
       resize.dispose();
-      const redNorm = mul(red, rgb[0]);
-      const greenNorm = mul(green, rgb[1]);
-      const blueNorm = mul(blue, rgb[2]);
+      const redNorm = tf.mul(red, rgb[0]);
+      const greenNorm = tf.mul(green, rgb[1]);
+      const blueNorm = tf.mul(blue, rgb[2]);
       red.dispose();
       green.dispose();
       blue.dispose();
-      const grayscale = addN([redNorm, greenNorm, blueNorm]);
+      const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
       redNorm.dispose();
       greenNorm.dispose();
       blueNorm.dispose();
-      const normalize = tidy(() => grayscale.sub(0.5).mul(2));
+      const normalize = tf.tidy(() => grayscale.sub(0.5).mul(2));
       grayscale.dispose();
       const obj = [];
       if (config2.face.emotion.enabled) {
@@ -1468,9 +1411,9 @@
         if (!config2.profile) {
           const emotionT = await models.emotion.predict(normalize);
           data2 = emotionT.dataSync();
-          dispose(emotionT);
+          tf.dispose(emotionT);
         } else {
-          const profileData = await profile3(() => models.emotion.predict(normalize));
+          const profileData = await tf.profile(() => models.emotion.predict(normalize));
           data2 = profileData.result.dataSync();
           profileData.result.dispose();
           profile2.run("emotion", profileData);
@@ -1491,9 +1434,6 @@
 });
 // src/body/modelBase.js
-import {
-  tidy
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 var require_modelBase = __commonJS((exports) => {
   class BaseModel {
     constructor(model, outputStride) {
@@ -1501,7 +1441,7 @@ var require_modelBase = __commonJS((exports) => {
       this.outputStride = outputStride;
     }
     predict(input) {
-      return tidy(() => {
+      return tf.tidy(() => {
         const asFloat = this.preprocessInput(input.toFloat());
         const asBatch = asFloat.expandDims(0);
         const results = this.model.predict(asBatch);
@@ -1523,15 +1463,11 @@
 });
 // src/body/modelMobileNet.js
-import {
-  div,
-  tidy
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 var require_modelMobileNet = __commonJS((exports) => {
   const modelBase = __toModule(require_modelBase());
   class MobileNet extends modelBase.BaseModel {
     preprocessInput(input) {
-      return tidy(() => div(input, 127.5).sub(1));
+      return tf.tidy(() => tf.div(input, 127.5).sub(1));
     }
     nameOutputResults(results) {
       const [offsets, heatmap, displacementFwd, displacementBwd] = results;
@@ -1946,8 +1882,8 @@ var require_util2 = __commonJS((exports) => {
     };
   }
   exports.scalePose = scalePose;
-  function resizeTo(image3, [targetH, targetW]) {
-    const input = image3.squeeze(0);
+  function resizeTo(image2, [targetH, targetW]) {
+    const input = image2.squeeze(0);
     const resized = input.resizeBilinear([targetH, targetW]);
     input.dispose();
     return resized;
@@ -1961,9 +1897,6 @@
 });
 // src/body/modelPoseNet.js
-import {
-  loadGraphModel
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 var require_modelPoseNet = __commonJS((exports) => {
   const modelMobileNet = __toModule(require_modelMobileNet());
   const decodeMultiple = __toModule(require_decodeMultiple());
@@ -2031,19 +1964,6 @@ var require_posenet = __commonJS((exports) => {
 });
 // src/hand/handdetector.js
-import {
-  add,
-  concat2d,
-  div,
-  image as image3,
-  mul,
-  sigmoid,
-  slice,
-  sub,
-  tensor1d,
-  tensor2d,
-  tidy
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 var require_handdetector = __commonJS((exports) => {
   /**
   * @license
@@ -2065,46 +1985,46 @@
     constructor(model, inputSize, anchorsAnnotated) {
       this.model = model;
       this.anchors = anchorsAnnotated.map((anchor) => [anchor.x_center, anchor.y_center]);
-      this.anchorsTensor = tensor2d(this.anchors);
-      this.inputSizeTensor = tensor1d([inputSize, inputSize]);
-      this.doubleInputSizeTensor = tensor1d([inputSize * 2, inputSize * 2]);
+      this.anchorsTensor = tf.tensor2d(this.anchors);
+      this.inputSizeTensor = tf.tensor1d([inputSize, inputSize]);
+      this.doubleInputSizeTensor = tf.tensor1d([inputSize * 2, inputSize * 2]);
     }
     normalizeBoxes(boxes) {
-      return tidy(() => {
-        const boxOffsets = slice(boxes, [0, 0], [-1, 2]);
-        const boxSizes = slice(boxes, [0, 2], [-1, 2]);
-        const boxCenterPoints = add(div(boxOffsets, this.inputSizeTensor), this.anchorsTensor);
-        const halfBoxSizes = div(boxSizes, this.doubleInputSizeTensor);
-        const startPoints = mul(sub(boxCenterPoints, halfBoxSizes), this.inputSizeTensor);
-        const endPoints = mul(add(boxCenterPoints, halfBoxSizes), this.inputSizeTensor);
-        return concat2d([startPoints, endPoints], 1);
+      return tf.tidy(() => {
+        const boxOffsets = tf.slice(boxes, [0, 0], [-1, 2]);
+        const boxSizes = tf.slice(boxes, [0, 2], [-1, 2]);
+        const boxCenterPoints = tf.add(tf.div(boxOffsets, this.inputSizeTensor), this.anchorsTensor);
+        const halfBoxSizes = tf.div(boxSizes, this.doubleInputSizeTensor);
+        const startPoints = tf.mul(tf.sub(boxCenterPoints, halfBoxSizes), this.inputSizeTensor);
+        const endPoints = tf.mul(tf.add(boxCenterPoints, halfBoxSizes), this.inputSizeTensor);
+        return tf.concat2d([startPoints, endPoints], 1);
       });
     }
     normalizeLandmarks(rawPalmLandmarks, index) {
-      return tidy(() => {
-        const landmarks = add(div(rawPalmLandmarks.reshape([-1, 7, 2]), this.inputSizeTensor), this.anchors[index]);
-        return mul(landmarks, this.inputSizeTensor);
+      return tf.tidy(() => {
+        const landmarks = tf.add(tf.div(rawPalmLandmarks.reshape([-1, 7, 2]), this.inputSizeTensor), this.anchors[index]);
+        return tf.mul(landmarks, this.inputSizeTensor);
       });
     }
     async getBoxes(input, config2) {
       const batched = this.model.predict(input);
       const predictions = batched.squeeze();
       batched.dispose();
-      const scores = tidy(() => sigmoid(slice(predictions, [0, 0], [-1, 1])).squeeze());
+      const scores = tf.tidy(() => tf.sigmoid(tf.slice(predictions, [0, 0], [-1, 1])).squeeze());
       const scoresVal = scores.dataSync();
-      const rawBoxes = slice(predictions, [0, 1], [-1, 4]);
+      const rawBoxes = tf.slice(predictions, [0, 1], [-1, 4]);
       const boxes = this.normalizeBoxes(rawBoxes);
       rawBoxes.dispose();
-      const filteredT = await image3.nonMaxSuppressionAsync(boxes, scores, config2.maxHands, config2.iouThreshold, config2.scoreThreshold);
+      const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config2.maxHands, config2.iouThreshold, config2.scoreThreshold);
       const filtered = filteredT.arraySync();
       scores.dispose();
       filteredT.dispose();
       const hands = [];
       for (const boxIndex of filtered) {
         if (scoresVal[boxIndex] >= config2.minConfidence) {
-          const matchingBox = slice(boxes, [boxIndex, 0], [1, -1]);
-          const rawPalmLandmarks = slice(predictions, [boxIndex, 5], [1, 14]);
-          const palmLandmarks = tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));
+          const matchingBox = tf.slice(boxes, [boxIndex, 0], [1, -1]);
+          const rawPalmLandmarks = tf.slice(predictions, [boxIndex, 5], [1, 14]);
+          const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));
           rawPalmLandmarks.dispose();
           hands.push({box: matchingBox, palmLandmarks, confidence: scoresVal[boxIndex]});
         }
@@ -2116,9 +2036,9 @@
     async estimateHandBounds(input, config2) {
       const inputHeight = input.shape[1];
       const inputWidth = input.shape[2];
-      const image4 = tidy(() => input.resizeBilinear([config2.inputSize, config2.inputSize]).div(127.5).sub(1));
-      const predictions = await this.getBoxes(image4, config2);
-      image4.dispose();
+      const image2 = tf.tidy(() => input.resizeBilinear([config2.inputSize, config2.inputSize]).div(127.5).sub(1));
+      const predictions = await this.getBoxes(image2, config2);
+      image2.dispose();
       if (!predictions || predictions.length === 0)
         return null;
       const hands = [];
@@ -2138,10 +2058,6 @@
 });
 // src/hand/handpipeline.js
-import {
-  image as image3,
-  reshape
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 var require_handpipeline = __commonJS((exports) => {
   /**
   * @license
@@ -2218,13 +2134,13 @@
         coord[2]
       ]);
     }
-    async estimateHands(image4, config2) {
+    async estimateHands(image2, config2) {
       this.skipped++;
       let useFreshBox = false;
       let boxes;
       if (this.skipped > config2.skipFrames || !config2.landmarks) {
-        boxes = await this.boxDetector.estimateHandBounds(image4, config2);
-        if (image4.shape[1] !== 255 && image4.shape[2] !== 255)
+        boxes = await this.boxDetector.estimateHandBounds(image2, config2);
+        if (image2.shape[1] !== 255 && image2.shape[2] !== 255)
           this.skipped = 0;
       }
       if (boxes && boxes.length > 0 && (boxes.length !== this.detectedHands && this.detectedHands !== config2.maxHands || !config2.landmarks)) {
@@ -2243,8 +2159,8 @@
         if (config2.landmarks) {
           const angle = computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
           const palmCenter = getBoxCenter(currentBox);
-          const palmCenterNormalized = [palmCenter[0] / image4.shape[2], palmCenter[1] / image4.shape[1]];
-          const rotatedImage = image3.rotateWithOffset(image4, angle, 0, palmCenterNormalized);
+          const palmCenterNormalized = [palmCenter[0] / image2.shape[2], palmCenter[1] / image2.shape[1]];
+          const rotatedImage = tf.image.rotateWithOffset(image2, angle, 0, palmCenterNormalized);
           const rotationMatrix = buildRotationMatrix(-angle, palmCenter);
           const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
           const croppedInput = cutBoxFromImageAndResize(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@@ -2256,7 +2172,7 @@
           const confidenceValue = confidence.dataSync()[0];
           confidence.dispose();
           if (confidenceValue >= config2.minConfidence) {
-            const keypointsReshaped = reshape(keypoints, [-1, 3]);
+            const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
             const rawCoords = keypointsReshaped.arraySync();
             keypoints.dispose();
             keypointsReshaped.dispose();
@@ -19974,9 +19890,6 @@ var require_anchors = __commonJS((exports) => {
 });
 // src/hand/handpose.js
-import {
-  loadGraphModel
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 var require_handpose = __commonJS((exports) => {
   const handdetector = __toModule(require_handdetector());
   const pipeline = __toModule(require_handpipeline());
@@ -20192,8 +20105,8 @@ var require_imagefx = __commonJS((exports) => {
     this.reset = function() {
      _filterChain = [];
     };
-    this.apply = function(image3) {
-      _resize(image3.width, image3.height);
+    this.apply = function(image2) {
+      _resize(image2.width, image2.height);
      _drawCount = 0;
      if (!_sourceTexture)
        _sourceTexture = gl.createTexture();
@@ -20202,7 +20115,7 @@
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
-     gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image3);
+     gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image2);
      if (_filterChain.length === 0) {
        _draw();
        return _canvas;
@@ -20838,20 +20751,14 @@
 });
 // src/image.js
-import {
-  ENV,
-  Tensor,
-  browser,
-  clone
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 var require_image = __commonJS((exports) => {
   const fxImage = __toModule(require_imagefx());
   let inCanvas = null;
   let outCanvas = null;
   function process3(input, config2) {
     let tensor;
-    if (input instanceof Tensor) {
-      tensor = clone(input);
+    if (input instanceof tf.Tensor) {
+      tensor = tf.clone(input);
     } else {
       const originalWidth = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
       const originalHeight = input.naturalHeight || input.videoHeight || input.height || input.shape && input.shape[2] > 0;
@@ -20884,7 +20791,7 @@
         outCanvas.width = inCanvas.width;
       if (outCanvas.height !== inCanvas.height)
         outCanvas.height = inCanvas.height;
-      this.fx = ENV.flags.IS_BROWSER ? new fxImage.Canvas({canvas: outCanvas}) : null;
+      this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.Canvas({canvas: outCanvas}) : null;
     }
     this.fx.reset();
     this.fx.addFilter("brightness", config2.filter.brightness);
@@ -20920,7 +20827,7 @@
     }
     let pixels;
     if (config2.backend === "webgl" || outCanvas instanceof ImageData) {
-      pixels = browser.fromPixels(outCanvas);
+      pixels = tf.browser.fromPixels(outCanvas);
     } else {
       const tempCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
       tempCanvas.width = targetWidth;
@@ -20928,7 +20835,7 @@
       const tempCtx = tempCanvas.getContext("2d");
       tempCtx.drawImage(outCanvas, 0, 0);
       const data2 = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
-      pixels = browser.fromPixels(data2);
+      pixels = tf.browser.fromPixels(data2);
     }
     const casted = pixels.toFloat();
     tensor = casted.expandDims(0);
@@ -20940,9 +20847,10 @@
   exports.process = process3;
 });
-// src/human.js
-import * as tf2 from "@tensorflow/tfjs/dist/tf.es2017.js";
+// src/tf.js
+import * as tf from "@tensorflow/tfjs/dist/tf.es2017.js";
 import {setWasmPaths} from "@tensorflow/tfjs-backend-wasm/dist/index.js";
+const loadGraphModel = tf.loadGraphModel;
 // src/face/triangulation.js
 var triangulation_default = [
@@ -23596,9 +23504,6 @@ const emotion = __toModule(require_emotion());
 const posenet = __toModule(require_posenet());
 // src/hand/box.js
-import {
-  image
-} from "@tensorflow/tfjs/dist/tf.es2017.js";
 /**
 * @license
 * Copyright 2020 Google LLC. All Rights Reserved.
@@ -23627,16 +23532,16 @@ function getBoxCenter(box) {
     box.startPoint[1] + (box.endPoint[1] - box.startPoint[1]) / 2
   ];
 }
-function cutBoxFromImageAndResize(box, image3, cropSize) {
-  const h = image3.shape[1];
-  const w = image3.shape[2];
+function cutBoxFromImageAndResize(box, image2, cropSize) {
+  const h = image2.shape[1];
+  const w = image2.shape[2];
   const boxes = [[
     box.startPoint[1] / h,
     box.startPoint[0] / w,
     box.endPoint[1] / h,
     box.endPoint[0] / w
   ]];
-  return image.cropAndResize(image3, boxes, [0], cropSize);
+  return tf.image.cropAndResize(image2, boxes, [0], cropSize);
 }
 function scaleBoxCoordinates(box, factor) {
   const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];
@@ -23757,7 +23662,7 @@ function rotatePoint(homogeneousCoordinate, rotationMatrix) {
 // src/human.js
 const handpose = __toModule(require_handpose());
 const gesture = __toModule(require_gesture());
-const image2 = __toModule(require_image());
+const image = __toModule(require_image());
 const profile = __toModule(require_profile());
 // config.js
@@ -23861,7 +23766,7 @@ var config_default = {
 };
 // package.json
-var version = "0.8.5";
+var version = "0.8.6";
 // src/human.js
 const disableSkipFrames = {
@@ -23892,7 +23797,7 @@ function mergeDeep(...objects) {
 }
 class Human {
   constructor(userConfig = {}) {
-    this.tf = tf2;
+    this.tf = tf;
     this.version = version;
     this.config = mergeDeep(config_default, userConfig);
     this.fx = null;
@@ -23930,7 +23835,7 @@ class Human {
   analyze(...msg) {
     if (!this.analyzeMemoryLeaks)
       return;
-    const current = tf2.engine().state.numTensors;
+    const current = tf.engine().state.numTensors;
     const previous = this.numTensors;
     this.numTensors = current;
     const leaked = current - previous;
@@ -23942,11 +23847,11 @@
       return null;
     if (!input)
       return "input is not defined";
-    if (tf2.ENV.flags.IS_NODE && !(input instanceof tf2.Tensor)) {
+    if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {
       return "input must be a tensor";
     }
     try {
-      tf2.getBackend();
+      tf.getBackend();
     } catch (e) {
       return "backend not loaded";
     }
@@ -23959,9 +23864,9 @@
     this.config = mergeDeep(this.config, userConfig);
     if (this.firstRun) {
       this.checkBackend(true);
-      this.log(`version: ${this.version} TensorFlow/JS version: ${tf2.version_core}`);
+      this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);
       this.log("configuration:", this.config);
-      this.log("flags:", tf2.ENV.flags);
+      this.log("flags:", tf.ENV.flags);
       this.firstRun = false;
     }
     if (this.config.async) {
@@ -23973,12 +23878,12 @@
         this.models.posenet,
         this.models.handpose
       ] = await Promise.all([
-        this.models.age || age.load(this.config),
-        this.models.gender || gender.load(this.config),
-        this.models.emotion || emotion.load(this.config),
-        this.models.facemesh || facemesh.load(this.config.face),
-        this.models.posenet || posenet.load(this.config),
-        this.models.handpose || handpose.load(this.config.hand)
+        this.config.face.age.enabled ? this.models.age || age.load(this.config) : null,
+        this.config.face.gender.enabled ? this.models.gender || gender.load(this.config) : null,
+        this.config.face.emotion.enabled ? this.models.emotion || emotion.load(this.config) : null,
+        this.config.face.enabled ? this.models.facemesh || facemesh.load(this.config.face) : null,
+        this.config.body.enabled ? this.models.posenet || posenet.load(this.config) : null,
+        this.config.hand.enabled ? this.models.handpose || handpose.load(this.config.hand) : null
       ]);
     } else {
       if (this.config.face.enabled && !this.models.facemesh)
@@ -24000,26 +23905,26 @@
   }
   async checkBackend(force) {
     const timeStamp = now();
-    if (this.config.backend && this.config.backend !== "" && force || tf2.getBackend() !== this.config.backend) {
+    if (this.config.backend && this.config.backend !== "" && force || tf.getBackend() !== this.config.backend) {
       this.state = "backend";
       this.log("setting backend:", this.config.backend);
       if (this.config.backend === "wasm") {
         this.log("settings wasm path:", this.config.wasmPath);
         setWasmPaths(this.config.wasmPath);
-        const simd = await tf2.env().getAsync("WASM_HAS_SIMD_SUPPORT");
+        const simd = await tf.env().getAsync("WASM_HAS_SIMD_SUPPORT");
         if (!simd)
           this.log("warning: wasm simd support is not enabled");
       }
-      await tf2.setBackend(this.config.backend);
-      tf2.enableProdMode();
+      await tf.setBackend(this.config.backend);
+      tf.enableProdMode();
       if (this.config.backend === "webgl") {
         if (this.config.deallocate) {
           this.log("changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:", this.config.deallocate);
-          tf2.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", this.config.deallocate ? 0 : -1);
+          tf.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", this.config.deallocate ? 0 : -1);
         }
-        tf2.ENV.set("WEBGL_PACK_DEPTHWISECONV", true);
+        tf.ENV.set("WEBGL_PACK_DEPTHWISECONV", true);
       }
-      await tf2.ready();
+      await tf.ready();
     }
     const current = Math.trunc(now() - timeStamp);
     if (current > (this.perf.backend || 0))
@@ -24121,10 +24026,10 @@
     await this.checkBackend();
     await this.load();
     if (this.config.scoped)
-      tf2.engine().startScope();
+      tf.engine().startScope();
     this.analyze("Start Scope:");
     timeStamp = now();
-    const process3 = image2.process(input, this.config);
+    const process3 = image.process(input, this.config);
     this.perf.image = Math.trunc(now() - timeStamp);
     this.analyze("Get Image:");
     if (this.config.async) {
@@ -24165,7 +24070,7 @@
     }
     process3.tensor.dispose();
     if (this.config.scoped)
-      tf2.engine().endScope();
+      tf.engine().endScope();
     this.analyze("End Scope:");
     let gestureRes = [];
     if (this.config.gesture.enabled) {
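The async-ops fix is the load() hunk above: with config.async set, all six models used to be fetched in Promise.all regardless of configuration; each slot is now gated on its enable flag and resolves to null when that detector is off. A self-contained sketch of the pattern (the helper and its entry shape are illustrative, not from the repository):

// gate parallel model loads on feature flags; disabled entries resolve to null instead of fetching
async function loadEnabled(entries) {
  // entries: [{ enabled: boolean, cached: object | null, load: () => Promise<object> }]
  return Promise.all(entries.map((e) => (e.enabled ? e.cached || e.load() : null)));
}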

dist/human.esm-nobundle.js.map: file diff suppressed because one or more lines are too long

dist/human.esm-nobundle.json

@@ -9,8 +9,11 @@
       "imports": []
     },
     "src/age/age.js": {
-      "bytes": 1926,
+      "bytes": 1912,
       "imports": [
+        {
+          "path": "src/tf.js"
+        },
         {
           "path": "src/profile.js"
         }
@@ -58,20 +61,30 @@
       "imports": []
     },
     "src/body/modelBase.js": {
-      "bytes": 901,
-      "imports": []
+      "bytes": 874,
+      "imports": [
+        {
+          "path": "src/tf.js"
+        }
+      ]
     },
     "src/body/modelMobileNet.js": {
-      "bytes": 611,
+      "bytes": 584,
       "imports": [
+        {
+          "path": "src/tf.js"
+        },
         {
           "path": "src/body/modelBase.js"
         }
       ]
     },
     "src/body/modelPoseNet.js": {
-      "bytes": 1940,
+      "bytes": 1922,
       "imports": [
+        {
+          "path": "src/tf.js"
+        },
         {
           "path": "src/body/modelMobileNet.js"
         },
@@ -120,24 +133,38 @@
       ]
     },
     "src/emotion/emotion.js": {
-      "bytes": 2942,
+      "bytes": 2928,
       "imports": [
+        {
+          "path": "src/tf.js"
+        },
         {
           "path": "src/profile.js"
         }
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 6955,
-      "imports": []
+      "bytes": 6941,
+      "imports": [
+        {
+          "path": "src/tf.js"
+        }
+      ]
     },
     "src/face/box.js": {
-      "bytes": 1947,
-      "imports": []
+      "bytes": 1920,
+      "imports": [
+        {
+          "path": "src/tf.js"
+        }
+      ]
     },
     "src/face/facemesh.js": {
-      "bytes": 2621,
+      "bytes": 2604,
       "imports": [
+        {
+          "path": "src/tf.js"
+        },
         {
           "path": "src/face/blazeface.js"
         },
@@ -156,8 +183,11 @@
       ]
     },
     "src/face/facepipeline.js": {
-      "bytes": 13713,
+      "bytes": 13686,
       "imports": [
+        {
+          "path": "src/tf.js"
+        },
         {
           "path": "src/face/box.js"
         },
@@ -186,8 +216,11 @@
       "imports": []
     },
     "src/gender/gender.js": {
-      "bytes": 3207,
+      "bytes": 3193,
       "imports": [
+        {
+          "path": "src/tf.js"
+        },
         {
           "path": "src/profile.js"
         }
@@ -202,20 +235,30 @@
       "imports": []
     },
     "src/hand/box.js": {
-      "bytes": 3238,
-      "imports": []
+      "bytes": 3211,
+      "imports": [
+        {
+          "path": "src/tf.js"
+        }
+      ]
     },
     "src/hand/handdetector.js": {
-      "bytes": 4248,
+      "bytes": 4221,
       "imports": [
+        {
+          "path": "src/tf.js"
+        },
         {
           "path": "src/hand/box.js"
         }
       ]
     },
     "src/hand/handpipeline.js": {
-      "bytes": 7559,
+      "bytes": 7532,
       "imports": [
+        {
+          "path": "src/tf.js"
+        },
         {
           "path": "src/hand/box.js"
         },
@@ -225,8 +268,11 @@
       ]
     },
     "src/hand/handpose.js": {
-      "bytes": 3047,
+      "bytes": 3026,
       "imports": [
+        {
+          "path": "src/tf.js"
+        },
         {
           "path": "src/hand/handdetector.js"
         },
@@ -243,8 +289,11 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 14315,
+      "bytes": 14448,
       "imports": [
+        {
+          "path": "src/tf.js"
+        },
         {
           "path": "src/face/facemesh.js"
         },
@@ -281,8 +330,11 @@
       ]
     },
     "src/image.js": {
-      "bytes": 4666,
+      "bytes": 4638,
       "imports": [
+        {
+          "path": "src/tf.js"
+        },
         {
           "path": "src/imagefx.js"
         }
@@ -295,55 +347,59 @@
     "src/profile.js": {
       "bytes": 1061,
       "imports": []
+    },
+    "src/tf.js": {
+      "bytes": 871,
+      "imports": []
     }
   },
   "outputs": {
     "dist/human.esm-nobundle.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 780280
+      "bytes": 766195
     },
     "dist/human.esm-nobundle.js": {
       "imports": [],
       "inputs": {
         "src/face/blazeface.js": {
-          "bytesInOutput": 7075
+          "bytesInOutput": 6970
         },
         "src/face/keypoints.js": {
           "bytesInOutput": 2768
         },
         "src/face/box.js": {
-          "bytesInOutput": 2122
+          "bytesInOutput": 2052
         },
         "src/face/util.js": {
           "bytesInOutput": 3043
         },
         "src/face/facepipeline.js": {
-          "bytesInOutput": 12482
+          "bytesInOutput": 12379
         },
         "src/face/uvcoords.js": {
           "bytesInOutput": 20584
         },
         "src/face/facemesh.js": {
-          "bytesInOutput": 2641
+          "bytesInOutput": 2564
         },
         "src/profile.js": {
           "bytesInOutput": 1118
         },
         "src/age/age.js": {
-          "bytesInOutput": 1947
+          "bytesInOutput": 1826
         },
         "src/gender/gender.js": {
-          "bytesInOutput": 3108
+          "bytesInOutput": 2980
         },
         "src/emotion/emotion.js": {
-          "bytesInOutput": 2828
+          "bytesInOutput": 2697
         },
         "src/body/modelBase.js": {
-          "bytesInOutput": 924
+          "bytesInOutput": 866
         },
         "src/body/modelMobileNet.js": {
-          "bytesInOutput": 533
+          "bytesInOutput": 471
         },
         "src/body/heapSort.js": {
           "bytesInOutput": 1637
@ -367,22 +423,22 @@
"bytesInOutput": 2410 "bytesInOutput": 2410
}, },
"src/body/modelPoseNet.js": { "src/body/modelPoseNet.js": {
"bytesInOutput": 2083 "bytesInOutput": 2012
}, },
"src/body/posenet.js": { "src/body/posenet.js": {
"bytesInOutput": 963 "bytesInOutput": 963
}, },
"src/hand/handdetector.js": { "src/hand/handdetector.js": {
"bytesInOutput": 4486 "bytesInOutput": 4402
}, },
"src/hand/handpipeline.js": { "src/hand/handpipeline.js": {
"bytesInOutput": 7243 "bytesInOutput": 7165
}, },
"src/hand/anchors.js": { "src/hand/anchors.js": {
"bytesInOutput": 256590 "bytesInOutput": 256590
}, },
"src/hand/handpose.js": { "src/hand/handpose.js": {
"bytesInOutput": 3135 "bytesInOutput": 3064
}, },
"src/gesture.js": { "src/gesture.js": {
"bytesInOutput": 3038 "bytesInOutput": 3038
@ -391,10 +447,10 @@
"bytesInOutput": 20097 "bytesInOutput": 20097
}, },
"src/image.js": { "src/image.js": {
"bytesInOutput": 4558 "bytesInOutput": 4483
}, },
"src/human.js": { "src/tf.js": {
"bytesInOutput": 133 "bytesInOutput": 174
}, },
"src/face/triangulation.js": { "src/face/triangulation.js": {
"bytesInOutput": 17898 "bytesInOutput": 17898
@ -403,13 +459,13 @@
"bytesInOutput": 227 "bytesInOutput": 227
}, },
"src/hand/box.js": { "src/hand/box.js": {
"bytesInOutput": 3096 "bytesInOutput": 3037
}, },
"src/hand/util.js": { "src/hand/util.js": {
"bytesInOutput": 2884 "bytesInOutput": 2884
}, },
"src/human.js": { "src/human.js": {
"bytesInOutput": 187 "bytesInOutput": 186
}, },
"config.js": { "config.js": {
"bytesInOutput": 1933 "bytesInOutput": 1933
@ -421,13 +477,13 @@
"bytesInOutput": 23 "bytesInOutput": 23
}, },
"src/human.js": { "src/human.js": {
"bytesInOutput": 11792 "bytesInOutput": 11998
}, },
"src/human.js": { "src/human.js": {
"bytesInOutput": 0 "bytesInOutput": 0
} }
}, },
"bytes": 418961 "bytes": 417911
} }
} }
} }
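
The metadata deltas above all record the same refactor: every src module drops its direct import of @tensorflow/tfjs/dist/tf.es2017.js in favor of the new src/tf.js wrapper. A hypothetical before/after sketch of the top of one such module, e.g. src/age/age.js (import shape inferred from the paths and the bundle diff below, not copied from the actual source):

// before: each module aliased the upstream TFJS build directly
// import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js';
// after (assumed shape): the shared local wrapper supplies the namespace
// and the loadGraphModel helper used by the model loaders
import { tf, loadGraphModel } from '../tf.js';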

dist/human.esm.js vendored (454 lines changed): file diff suppressed because it is too large
dist/human.esm.js.map vendored: file diff suppressed because one or more lines are too long
dist/human.esm.json vendored (125 lines changed):

@ -12316,10 +12316,10 @@
"imports": [] "imports": []
}, },
"src/age/age.js": { "src/age/age.js": {
"bytes": 1926, "bytes": 1912,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/profile.js" "path": "src/profile.js"
@ -12368,18 +12368,18 @@
"imports": [] "imports": []
}, },
"src/body/modelBase.js": { "src/body/modelBase.js": {
"bytes": 901, "bytes": 874,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
} }
] ]
}, },
"src/body/modelMobileNet.js": { "src/body/modelMobileNet.js": {
"bytes": 611, "bytes": 584,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/body/modelBase.js" "path": "src/body/modelBase.js"
@ -12387,10 +12387,10 @@
] ]
}, },
"src/body/modelPoseNet.js": { "src/body/modelPoseNet.js": {
"bytes": 1940, "bytes": 1922,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/body/modelMobileNet.js" "path": "src/body/modelMobileNet.js"
@ -12440,10 +12440,10 @@
] ]
}, },
"src/emotion/emotion.js": { "src/emotion/emotion.js": {
"bytes": 2942, "bytes": 2928,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/profile.js" "path": "src/profile.js"
@ -12451,26 +12451,26 @@
] ]
}, },
"src/face/blazeface.js": { "src/face/blazeface.js": {
"bytes": 6955, "bytes": 6941,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
} }
] ]
}, },
"src/face/box.js": { "src/face/box.js": {
"bytes": 1947, "bytes": 1920,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
} }
] ]
}, },
"src/face/facemesh.js": { "src/face/facemesh.js": {
"bytes": 2621, "bytes": 2604,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/face/blazeface.js" "path": "src/face/blazeface.js"
@ -12490,10 +12490,10 @@
] ]
}, },
"src/face/facepipeline.js": { "src/face/facepipeline.js": {
"bytes": 13713, "bytes": 13686,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/face/box.js" "path": "src/face/box.js"
@ -12523,10 +12523,10 @@
"imports": [] "imports": []
}, },
"src/gender/gender.js": { "src/gender/gender.js": {
"bytes": 3207, "bytes": 3193,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/profile.js" "path": "src/profile.js"
@ -12542,18 +12542,18 @@
"imports": [] "imports": []
}, },
"src/hand/box.js": { "src/hand/box.js": {
"bytes": 3238, "bytes": 3211,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
} }
] ]
}, },
"src/hand/handdetector.js": { "src/hand/handdetector.js": {
"bytes": 4248, "bytes": 4221,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/hand/box.js" "path": "src/hand/box.js"
@ -12561,10 +12561,10 @@
] ]
}, },
"src/hand/handpipeline.js": { "src/hand/handpipeline.js": {
"bytes": 7559, "bytes": 7532,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/hand/box.js" "path": "src/hand/box.js"
@ -12575,10 +12575,10 @@
] ]
}, },
"src/hand/handpose.js": { "src/hand/handpose.js": {
"bytes": 3047, "bytes": 3026,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/hand/handdetector.js" "path": "src/hand/handdetector.js"
@ -12596,13 +12596,10 @@
"imports": [] "imports": []
}, },
"src/human.js": { "src/human.js": {
"bytes": 14315, "bytes": 14448,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-wasm/dist/index.js"
}, },
{ {
"path": "src/face/facemesh.js" "path": "src/face/facemesh.js"
@ -12640,10 +12637,10 @@
] ]
}, },
"src/image.js": { "src/image.js": {
"bytes": 4666, "bytes": 4638,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/imagefx.js" "path": "src/imagefx.js"
@ -12658,6 +12655,17 @@
"bytes": 1061, "bytes": 1061,
"imports": [] "imports": []
}, },
"src/tf.js": {
"bytes": 871,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-wasm/dist/index.js"
}
]
},
"empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js": { "empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js": {
"bytes": 0, "bytes": 0,
"imports": [] "imports": []
@ -12683,7 +12691,7 @@
"dist/human.esm.js.map": { "dist/human.esm.js.map": {
"imports": [], "imports": [],
"inputs": {}, "inputs": {},
"bytes": 5469731 "bytes": 5456850
}, },
"dist/human.esm.js": { "dist/human.esm.js": {
"imports": [], "imports": [],
@ -12698,7 +12706,7 @@
"bytesInOutput": 6782 "bytesInOutput": 6782
}, },
"node_modules/@tensorflow/tfjs/dist/tf.es2017.js": { "node_modules/@tensorflow/tfjs/dist/tf.es2017.js": {
"bytesInOutput": 2548112 "bytesInOutput": 2548114
}, },
"empty:path": { "empty:path": {
"bytesInOutput": 42 "bytesInOutput": 42
@ -12716,43 +12724,43 @@
"bytesInOutput": 39631 "bytesInOutput": 39631
}, },
"src/face/blazeface.js": { "src/face/blazeface.js": {
"bytesInOutput": 7053 "bytesInOutput": 6972
}, },
"src/face/keypoints.js": { "src/face/keypoints.js": {
"bytesInOutput": 2768 "bytesInOutput": 2768
}, },
"src/face/box.js": { "src/face/box.js": {
"bytesInOutput": 2100 "bytesInOutput": 2052
}, },
"src/face/util.js": { "src/face/util.js": {
"bytesInOutput": 3043 "bytesInOutput": 3043
}, },
"src/face/facepipeline.js": { "src/face/facepipeline.js": {
"bytesInOutput": 12452 "bytesInOutput": 12397
}, },
"src/face/uvcoords.js": { "src/face/uvcoords.js": {
"bytesInOutput": 20584 "bytesInOutput": 20584
}, },
"src/face/facemesh.js": { "src/face/facemesh.js": {
"bytesInOutput": 2620 "bytesInOutput": 2564
}, },
"src/profile.js": { "src/profile.js": {
"bytesInOutput": 1118 "bytesInOutput": 1118
}, },
"src/age/age.js": { "src/age/age.js": {
"bytesInOutput": 1881 "bytesInOutput": 1826
}, },
"src/gender/gender.js": { "src/gender/gender.js": {
"bytesInOutput": 3041 "bytesInOutput": 2980
}, },
"src/emotion/emotion.js": { "src/emotion/emotion.js": {
"bytesInOutput": 2757 "bytesInOutput": 2697
}, },
"src/body/modelBase.js": { "src/body/modelBase.js": {
"bytesInOutput": 914 "bytesInOutput": 866
}, },
"src/body/modelMobileNet.js": { "src/body/modelMobileNet.js": {
"bytesInOutput": 520 "bytesInOutput": 471
}, },
"src/body/heapSort.js": { "src/body/heapSort.js": {
"bytesInOutput": 1639 "bytesInOutput": 1639
@ -12776,22 +12784,22 @@
"bytesInOutput": 2412 "bytesInOutput": 2412
}, },
"src/body/modelPoseNet.js": { "src/body/modelPoseNet.js": {
"bytesInOutput": 2071 "bytesInOutput": 2020
}, },
"src/body/posenet.js": { "src/body/posenet.js": {
"bytesInOutput": 975 "bytesInOutput": 975
}, },
"src/hand/handdetector.js": { "src/hand/handdetector.js": {
"bytesInOutput": 4476 "bytesInOutput": 4402
}, },
"src/hand/handpipeline.js": { "src/hand/handpipeline.js": {
"bytesInOutput": 7214 "bytesInOutput": 7165
}, },
"src/hand/anchors.js": { "src/hand/anchors.js": {
"bytesInOutput": 256590 "bytesInOutput": 256590
}, },
"src/hand/handpose.js": { "src/hand/handpose.js": {
"bytesInOutput": 3119 "bytesInOutput": 3064
}, },
"src/gesture.js": { "src/gesture.js": {
"bytesInOutput": 3038 "bytesInOutput": 3038
@ -12800,10 +12808,10 @@
"bytesInOutput": 20097 "bytesInOutput": 20097
}, },
"src/image.js": { "src/image.js": {
"bytesInOutput": 4547 "bytesInOutput": 4495
}, },
"src/human.js": { "src/tf.js": {
"bytesInOutput": 45 "bytesInOutput": 44
}, },
"node_modules/@tensorflow/tfjs-core/dist/backends/backend.js": { "node_modules/@tensorflow/tfjs-core/dist/backends/backend.js": {
"bytesInOutput": 12318 "bytesInOutput": 12318
@ -13393,6 +13401,9 @@
"node_modules/@tensorflow/tfjs-backend-wasm/dist/index.js": { "node_modules/@tensorflow/tfjs-backend-wasm/dist/index.js": {
"bytesInOutput": 703 "bytesInOutput": 703
}, },
"src/tf.js": {
"bytesInOutput": 42
},
"src/face/triangulation.js": { "src/face/triangulation.js": {
"bytesInOutput": 17898 "bytesInOutput": 17898
}, },
@ -13400,13 +13411,13 @@
"bytesInOutput": 227 "bytesInOutput": 227
}, },
"src/hand/box.js": { "src/hand/box.js": {
"bytesInOutput": 3081 "bytesInOutput": 3037
}, },
"src/hand/util.js": { "src/hand/util.js": {
"bytesInOutput": 2884 "bytesInOutput": 2884
}, },
"src/human.js": { "src/human.js": {
"bytesInOutput": 187 "bytesInOutput": 186
}, },
"config.js": { "config.js": {
"bytesInOutput": 1933 "bytesInOutput": 1933
@ -13418,13 +13429,13 @@
"bytesInOutput": 24 "bytesInOutput": 24
}, },
"src/human.js": { "src/human.js": {
"bytesInOutput": 11818 "bytesInOutput": 12024
}, },
"src/human.js": { "src/human.js": {
"bytesInOutput": 0 "bytesInOutput": 0
} }
}, },
"bytes": 3442260 "bytes": 3441681
} }
} }
} }
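
The src/tf.js entry above (871 bytes, importing the ES2017 TFJS build and the WASM backend index) points to a thin re-export hub. A minimal sketch, assuming the wrapper does nothing beyond centralizing those imports (the actual 871-byte source may differ):

// src/tf.js (sketch): one place that decides which TFJS build and backends the library uses
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js';
import '@tensorflow/tfjs-backend-wasm/dist/index.js'; // side effect: registers the wasm backend
export { tf };
export const loadGraphModel = tf.loadGraphModel; // named re-export used by the model loaders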

dist/human.js vendored (454 lines changed): file diff suppressed because it is too large
dist/human.js.map vendored (6 lines changed): file diff suppressed because one or more lines are too long
dist/human.json vendored (134 lines changed):

@ -12316,10 +12316,10 @@
"imports": [] "imports": []
}, },
"src/age/age.js": { "src/age/age.js": {
"bytes": 1926, "bytes": 1912,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/profile.js" "path": "src/profile.js"
@ -12368,18 +12368,18 @@
"imports": [] "imports": []
}, },
"src/body/modelBase.js": { "src/body/modelBase.js": {
"bytes": 901, "bytes": 874,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
} }
] ]
}, },
"src/body/modelMobileNet.js": { "src/body/modelMobileNet.js": {
"bytes": 611, "bytes": 584,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/body/modelBase.js" "path": "src/body/modelBase.js"
@ -12387,10 +12387,10 @@
] ]
}, },
"src/body/modelPoseNet.js": { "src/body/modelPoseNet.js": {
"bytes": 1940, "bytes": 1922,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/body/modelMobileNet.js" "path": "src/body/modelMobileNet.js"
@ -12440,10 +12440,10 @@
] ]
}, },
"src/emotion/emotion.js": { "src/emotion/emotion.js": {
"bytes": 2942, "bytes": 2928,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/profile.js" "path": "src/profile.js"
@ -12451,26 +12451,26 @@
] ]
}, },
"src/face/blazeface.js": { "src/face/blazeface.js": {
"bytes": 6955, "bytes": 6941,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
} }
] ]
}, },
"src/face/box.js": { "src/face/box.js": {
"bytes": 1947, "bytes": 1920,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
} }
] ]
}, },
"src/face/facemesh.js": { "src/face/facemesh.js": {
"bytes": 2621, "bytes": 2604,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/face/blazeface.js" "path": "src/face/blazeface.js"
@ -12490,10 +12490,10 @@
] ]
}, },
"src/face/facepipeline.js": { "src/face/facepipeline.js": {
"bytes": 13713, "bytes": 13686,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/face/box.js" "path": "src/face/box.js"
@ -12523,10 +12523,10 @@
"imports": [] "imports": []
}, },
"src/gender/gender.js": { "src/gender/gender.js": {
"bytes": 3207, "bytes": 3193,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/profile.js" "path": "src/profile.js"
@ -12542,18 +12542,18 @@
"imports": [] "imports": []
}, },
"src/hand/box.js": { "src/hand/box.js": {
"bytes": 3238, "bytes": 3211,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
} }
] ]
}, },
"src/hand/handdetector.js": { "src/hand/handdetector.js": {
"bytes": 4248, "bytes": 4221,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/hand/box.js" "path": "src/hand/box.js"
@ -12561,10 +12561,10 @@
] ]
}, },
"src/hand/handpipeline.js": { "src/hand/handpipeline.js": {
"bytes": 7559, "bytes": 7532,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/hand/box.js" "path": "src/hand/box.js"
@ -12575,10 +12575,10 @@
] ]
}, },
"src/hand/handpose.js": { "src/hand/handpose.js": {
"bytes": 3047, "bytes": 3026,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/hand/handdetector.js" "path": "src/hand/handdetector.js"
@ -12596,13 +12596,10 @@
"imports": [] "imports": []
}, },
"src/human.js": { "src/human.js": {
"bytes": 14315, "bytes": 14448,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-wasm/dist/index.js"
}, },
{ {
"path": "src/face/facemesh.js" "path": "src/face/facemesh.js"
@ -12640,10 +12637,10 @@
] ]
}, },
"src/image.js": { "src/image.js": {
"bytes": 4666, "bytes": 4638,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js" "path": "src/tf.js"
}, },
{ {
"path": "src/imagefx.js" "path": "src/imagefx.js"
@ -12658,6 +12655,17 @@
"bytes": 1061, "bytes": 1061,
"imports": [] "imports": []
}, },
"src/tf.js": {
"bytes": 871,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-wasm/dist/index.js"
}
]
},
"empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js": { "empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js": {
"bytes": 0, "bytes": 0,
"imports": [] "imports": []
@ -12683,7 +12691,7 @@
"dist/human.js.map": { "dist/human.js.map": {
"imports": [], "imports": [],
"inputs": {}, "inputs": {},
"bytes": 5430856 "bytes": 5432434
}, },
"dist/human.js": { "dist/human.js": {
"imports": [], "imports": [],
@ -12698,7 +12706,7 @@
"bytesInOutput": 7294 "bytesInOutput": 7294
}, },
"node_modules/@tensorflow/tfjs/dist/tf.es2017.js": { "node_modules/@tensorflow/tfjs/dist/tf.es2017.js": {
"bytesInOutput": 2663526 "bytesInOutput": 2663519
}, },
"empty:path": { "empty:path": {
"bytesInOutput": 46 "bytesInOutput": 46
@ -12716,43 +12724,43 @@
"bytesInOutput": 41639 "bytesInOutput": 41639
}, },
"src/face/blazeface.js": { "src/face/blazeface.js": {
"bytesInOutput": 7353 "bytesInOutput": 7270
}, },
"src/face/keypoints.js": { "src/face/keypoints.js": {
"bytesInOutput": 2936 "bytesInOutput": 2936
}, },
"src/face/box.js": { "src/face/box.js": {
"bytesInOutput": 2208 "bytesInOutput": 2154
}, },
"src/face/util.js": { "src/face/util.js": {
"bytesInOutput": 3205 "bytesInOutput": 3205
}, },
"src/face/facepipeline.js": { "src/face/facepipeline.js": {
"bytesInOutput": 12926 "bytesInOutput": 12869
}, },
"src/face/uvcoords.js": { "src/face/uvcoords.js": {
"bytesInOutput": 21528 "bytesInOutput": 21528
}, },
"src/face/facemesh.js": { "src/face/facemesh.js": {
"bytesInOutput": 2723 "bytesInOutput": 2665
}, },
"src/profile.js": { "src/profile.js": {
"bytesInOutput": 1156 "bytesInOutput": 1156
}, },
"src/age/age.js": { "src/age/age.js": {
"bytesInOutput": 1975 "bytesInOutput": 1906
}, },
"src/gender/gender.js": { "src/gender/gender.js": {
"bytesInOutput": 3182 "bytesInOutput": 3107
}, },
"src/emotion/emotion.js": { "src/emotion/emotion.js": {
"bytesInOutput": 2883 "bytesInOutput": 2809
}, },
"src/body/modelBase.js": { "src/body/modelBase.js": {
"bytesInOutput": 970 "bytesInOutput": 920
}, },
"src/body/modelMobileNet.js": { "src/body/modelMobileNet.js": {
"bytesInOutput": 548 "bytesInOutput": 497
}, },
"src/body/heapSort.js": { "src/body/heapSort.js": {
"bytesInOutput": 1767 "bytesInOutput": 1767
@ -12773,40 +12781,43 @@
"bytesInOutput": 2100 "bytesInOutput": 2100
}, },
"src/body/util.js": { "src/body/util.js": {
"bytesInOutput": 2538 "bytesInOutput": 2536
}, },
"src/body/modelPoseNet.js": { "src/body/modelPoseNet.js": {
"bytesInOutput": 2148 "bytesInOutput": 2095
}, },
"src/body/posenet.js": { "src/body/posenet.js": {
"bytesInOutput": 1015 "bytesInOutput": 1015
}, },
"src/hand/handdetector.js": { "src/hand/handdetector.js": {
"bytesInOutput": 4651 "bytesInOutput": 4572
}, },
"src/hand/handpipeline.js": { "src/hand/handpipeline.js": {
"bytesInOutput": 7528 "bytesInOutput": 7470
}, },
"src/hand/anchors.js": { "src/hand/anchors.js": {
"bytesInOutput": 291926 "bytesInOutput": 291926
}, },
"src/hand/handpose.js": { "src/hand/handpose.js": {
"bytesInOutput": 3262 "bytesInOutput": 3205
}, },
"src/gesture.js": { "src/gesture.js": {
"bytesInOutput": 3154 "bytesInOutput": 3154
}, },
"src/imagefx.js": { "src/imagefx.js": {
"bytesInOutput": 21517 "bytesInOutput": 21513
}, },
"src/image.js": { "src/image.js": {
"bytesInOutput": 4705 "bytesInOutput": 4651
}, },
"src/human.js": { "src/human.js": {
"bytesInOutput": 13722 "bytesInOutput": 13878
},
"src/tf.js": {
"bytesInOutput": 46
}, },
"node_modules/@tensorflow/tfjs-core/dist/backends/backend.js": { "node_modules/@tensorflow/tfjs-core/dist/backends/backend.js": {
"bytesInOutput": 13308 "bytesInOutput": 13307
}, },
"node_modules/@tensorflow/tfjs-core/dist/util_base.js": { "node_modules/@tensorflow/tfjs-core/dist/util_base.js": {
"bytesInOutput": 13124 "bytesInOutput": 13124
@ -13187,7 +13198,7 @@
"bytesInOutput": 785 "bytesInOutput": 785
}, },
"node_modules/@tensorflow/tfjs-backend-wasm/dist/kernels/CropAndResize.js": { "node_modules/@tensorflow/tfjs-backend-wasm/dist/kernels/CropAndResize.js": {
"bytesInOutput": 2696 "bytesInOutput": 2683
}, },
"node_modules/@tensorflow/tfjs-backend-wasm/dist/kernels/Cumsum.js": { "node_modules/@tensorflow/tfjs-backend-wasm/dist/kernels/Cumsum.js": {
"bytesInOutput": 2555 "bytesInOutput": 2555
@ -13211,7 +13222,7 @@
"bytesInOutput": 1073 "bytesInOutput": 1073
}, },
"node_modules/@tensorflow/tfjs-backend-wasm/dist/kernels/FlipLeftRight.js": { "node_modules/@tensorflow/tfjs-backend-wasm/dist/kernels/FlipLeftRight.js": {
"bytesInOutput": 1587 "bytesInOutput": 1575
}, },
"node_modules/@tensorflow/tfjs-backend-wasm/dist/kernels/FloorDiv.js": { "node_modules/@tensorflow/tfjs-backend-wasm/dist/kernels/FloorDiv.js": {
"bytesInOutput": 860 "bytesInOutput": 860
@ -13313,7 +13324,7 @@
"bytesInOutput": 1839 "bytesInOutput": 1839
}, },
"node_modules/@tensorflow/tfjs-backend-wasm/dist/kernels/RotateWithOffset.js": { "node_modules/@tensorflow/tfjs-backend-wasm/dist/kernels/RotateWithOffset.js": {
"bytesInOutput": 2182 "bytesInOutput": 2170
}, },
"node_modules/@tensorflow/tfjs-backend-wasm/dist/kernels/Rsqrt.js": { "node_modules/@tensorflow/tfjs-backend-wasm/dist/kernels/Rsqrt.js": {
"bytesInOutput": 789 "bytesInOutput": 789
@ -13393,11 +13404,14 @@
"node_modules/@tensorflow/tfjs-backend-wasm/dist/index.js": { "node_modules/@tensorflow/tfjs-backend-wasm/dist/index.js": {
"bytesInOutput": 735 "bytesInOutput": 735
}, },
"src/tf.js": {
"bytesInOutput": 44
},
"src/face/triangulation.js": { "src/face/triangulation.js": {
"bytesInOutput": 23182 "bytesInOutput": 23182
}, },
"src/hand/box.js": { "src/hand/box.js": {
"bytesInOutput": 3233 "bytesInOutput": 3183
}, },
"src/hand/util.js": { "src/hand/util.js": {
"bytesInOutput": 3038 "bytesInOutput": 3038
@ -13412,7 +13426,7 @@
"bytesInOutput": 26 "bytesInOutput": 26
} }
}, },
"bytes": 3631167 "bytes": 3630472
} }
} }
} }


@ -34,7 +34,6 @@ var __toModule = (module2) => {
// src/face/blazeface.js // src/face/blazeface.js
var require_blazeface = __commonJS((exports2) => { var require_blazeface = __commonJS((exports2) => {
const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
const NUM_LANDMARKS = 6; const NUM_LANDMARKS = 6;
function generateAnchors(inputSize) { function generateAnchors(inputSize) {
const spec = {strides: [inputSize / 16, inputSize / 8], anchors: [2, 6]}; const spec = {strides: [inputSize / 16, inputSize / 8], anchors: [2, 6]};
@ -63,31 +62,31 @@ var require_blazeface = __commonJS((exports2) => {
}; };
const createBox = (startEndTensor) => ({ const createBox = (startEndTensor) => ({
startEndTensor, startEndTensor,
startPoint: tf3.slice(startEndTensor, [0, 0], [-1, 2]), startPoint: tf.slice(startEndTensor, [0, 0], [-1, 2]),
endPoint: tf3.slice(startEndTensor, [0, 2], [-1, 2]) endPoint: tf.slice(startEndTensor, [0, 2], [-1, 2])
}); });
const scaleBox = (box, factors) => { const scaleBox = (box, factors) => {
const starts = tf3.mul(box.startPoint, factors); const starts = tf.mul(box.startPoint, factors);
const ends = tf3.mul(box.endPoint, factors); const ends = tf.mul(box.endPoint, factors);
const newCoordinates = tf3.concat2d([starts, ends], 1); const newCoordinates = tf.concat2d([starts, ends], 1);
return createBox(newCoordinates); return createBox(newCoordinates);
}; };
function decodeBounds(boxOutputs, anchors, inputSize) { function decodeBounds(boxOutputs, anchors, inputSize) {
const boxStarts = tf3.slice(boxOutputs, [0, 1], [-1, 2]); const boxStarts = tf.slice(boxOutputs, [0, 1], [-1, 2]);
const centers = tf3.add(boxStarts, anchors); const centers = tf.add(boxStarts, anchors);
const boxSizes = tf3.slice(boxOutputs, [0, 3], [-1, 2]); const boxSizes = tf.slice(boxOutputs, [0, 3], [-1, 2]);
const boxSizesNormalized = tf3.div(boxSizes, inputSize); const boxSizesNormalized = tf.div(boxSizes, inputSize);
const centersNormalized = tf3.div(centers, inputSize); const centersNormalized = tf.div(centers, inputSize);
const halfBoxSize = tf3.div(boxSizesNormalized, 2); const halfBoxSize = tf.div(boxSizesNormalized, 2);
const starts = tf3.sub(centersNormalized, halfBoxSize); const starts = tf.sub(centersNormalized, halfBoxSize);
const ends = tf3.add(centersNormalized, halfBoxSize); const ends = tf.add(centersNormalized, halfBoxSize);
const startNormalized = tf3.mul(starts, inputSize); const startNormalized = tf.mul(starts, inputSize);
const endNormalized = tf3.mul(ends, inputSize); const endNormalized = tf.mul(ends, inputSize);
const concatAxis = 1; const concatAxis = 1;
return tf3.concat2d([startNormalized, endNormalized], concatAxis); return tf.concat2d([startNormalized, endNormalized], concatAxis);
} }
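// decodeBounds above is standard anchor-relative box decoding: the network emits a
// per-anchor (center offset, size) pair; center = offset + anchor, and the corners are
// (center/inputSize -/+ size/(2*inputSize)) * inputSize, i.e. center minus/plus the
// half-extent, mapped back to pixel coordinates before concat2d joins [start, end].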
function scaleBoxFromPrediction(face2, scaleFactor) { function scaleBoxFromPrediction(face2, scaleFactor) {
return tf3.tidy(() => { return tf.tidy(() => {
const box = face2["box"] ? face2["box"] : face2; const box = face2["box"] ? face2["box"] : face2;
return scaleBox(box, scaleFactor).startEndTensor.squeeze(); return scaleBox(box, scaleFactor).startEndTensor.squeeze();
}); });
@ -98,37 +97,37 @@ var require_blazeface = __commonJS((exports2) => {
this.width = config2.detector.inputSize; this.width = config2.detector.inputSize;
this.height = config2.detector.inputSize; this.height = config2.detector.inputSize;
this.anchorsData = generateAnchors(config2.detector.inputSize); this.anchorsData = generateAnchors(config2.detector.inputSize);
this.anchors = tf3.tensor2d(this.anchorsData); this.anchors = tf.tensor2d(this.anchorsData);
this.inputSize = tf3.tensor1d([this.width, this.height]); this.inputSize = tf.tensor1d([this.width, this.height]);
this.config = config2; this.config = config2;
this.scaleFaces = 0.8; this.scaleFaces = 0.8;
} }
async getBoundingBoxes(inputImage) { async getBoundingBoxes(inputImage) {
if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1) if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
return null; return null;
const [detectedOutputs, boxes, scores] = tf3.tidy(() => { const [detectedOutputs, boxes, scores] = tf.tidy(() => {
const resizedImage = inputImage.resizeBilinear([this.width, this.height]); const resizedImage = inputImage.resizeBilinear([this.width, this.height]);
const normalizedImage = tf3.sub(resizedImage.div(127.5), 1); const normalizedImage = tf.sub(resizedImage.div(127.5), 1);
const batchedPrediction = this.blazeFaceModel.predict(normalizedImage); const batchedPrediction = this.blazeFaceModel.predict(normalizedImage);
let prediction; let prediction;
if (Array.isArray(batchedPrediction)) { if (Array.isArray(batchedPrediction)) {
const sorted = batchedPrediction.sort((a, b) => a.size - b.size); const sorted = batchedPrediction.sort((a, b) => a.size - b.size);
const concat384 = tf3.concat([sorted[0], sorted[2]], 2); const concat384 = tf.concat([sorted[0], sorted[2]], 2);
const concat512 = tf3.concat([sorted[1], sorted[3]], 2); const concat512 = tf.concat([sorted[1], sorted[3]], 2);
const concat2 = tf3.concat([concat512, concat384], 1); const concat = tf.concat([concat512, concat384], 1);
prediction = concat2.squeeze(0); prediction = concat.squeeze(0);
} else { } else {
prediction = batchedPrediction.squeeze(); prediction = batchedPrediction.squeeze();
} }
const decodedBounds = decodeBounds(prediction, this.anchors, this.inputSize); const decodedBounds = decodeBounds(prediction, this.anchors, this.inputSize);
const logits = tf3.slice(prediction, [0, 0], [-1, 1]); const logits = tf.slice(prediction, [0, 0], [-1, 1]);
const scoresOut = tf3.sigmoid(logits).squeeze(); const scoresOut = tf.sigmoid(logits).squeeze();
return [prediction, decodedBounds, scoresOut]; return [prediction, decodedBounds, scoresOut];
}); });
const boxIndicesTensor = await tf3.image.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold); const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold);
const boxIndices = boxIndicesTensor.arraySync(); const boxIndices = boxIndicesTensor.arraySync();
boxIndicesTensor.dispose(); boxIndicesTensor.dispose();
const boundingBoxesMap = boxIndices.map((boxIndex) => tf3.slice(boxes, [boxIndex, 0], [1, -1])); const boundingBoxesMap = boxIndices.map((boxIndex) => tf.slice(boxes, [boxIndex, 0], [1, -1]));
const boundingBoxes = boundingBoxesMap.map((boundingBox) => { const boundingBoxes = boundingBoxesMap.map((boundingBox) => {
const vals = boundingBox.arraySync(); const vals = boundingBox.arraySync();
boundingBox.dispose(); boundingBox.dispose();
@ -142,7 +141,7 @@ var require_blazeface = __commonJS((exports2) => {
if (confidence > this.config.detector.minConfidence) { if (confidence > this.config.detector.minConfidence) {
const box = createBox(boundingBoxes[i]); const box = createBox(boundingBoxes[i]);
const anchor = this.anchorsData[boxIndex]; const anchor = this.anchorsData[boxIndex];
const landmarks = tf3.tidy(() => tf3.slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1]).squeeze().reshape([NUM_LANDMARKS, -1])); const landmarks = tf.tidy(() => tf.slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1]).squeeze().reshape([NUM_LANDMARKS, -1]));
annotatedBoxes.push({box, landmarks, anchor, confidence}); annotatedBoxes.push({box, landmarks, anchor, confidence});
} }
} }
@ -185,7 +184,7 @@ var require_blazeface = __commonJS((exports2) => {
} }
} }
async function load(config2) { async function load(config2) {
const blazeface = await tf3.loadGraphModel(config2.detector.modelPath, {fromTFHub: config2.detector.modelPath.includes("tfhub.dev")}); const blazeface = await loadGraphModel(config2.detector.modelPath, {fromTFHub: config2.detector.modelPath.includes("tfhub.dev")});
const model = new BlazeFaceModel(blazeface, config2); const model = new BlazeFaceModel(blazeface, config2);
console.log(`Human: load model: ${config2.detector.modelPath.match(/\/(.*)\./)[1]}`); console.log(`Human: load model: ${config2.detector.modelPath.match(/\/(.*)\./)[1]}`);
return model; return model;
@ -283,7 +282,6 @@ var require_keypoints = __commonJS((exports2) => {
// src/face/box.js // src/face/box.js
var require_box = __commonJS((exports2) => { var require_box = __commonJS((exports2) => {
const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
function scaleBoxCoordinates2(box, factor) { function scaleBoxCoordinates2(box, factor) {
const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]]; const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];
const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]]; const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];
@ -304,16 +302,16 @@ var require_box = __commonJS((exports2) => {
]; ];
} }
exports2.getBoxCenter = getBoxCenter2; exports2.getBoxCenter = getBoxCenter2;
function cutBoxFromImageAndResize2(box, image4, cropSize) { function cutBoxFromImageAndResize2(box, image2, cropSize) {
const h = image4.shape[1]; const h = image2.shape[1];
const w = image4.shape[2]; const w = image2.shape[2];
const boxes = [[ const boxes = [[
box.startPoint[1] / h, box.startPoint[1] / h,
box.startPoint[0] / w, box.startPoint[0] / w,
box.endPoint[1] / h, box.endPoint[1] / h,
box.endPoint[0] / w box.endPoint[0] / w
]]; ]];
return tf3.image.cropAndResize(image4, boxes, [0], cropSize); return tf.image.cropAndResize(image2, boxes, [0], cropSize);
} }
exports2.cutBoxFromImageAndResize = cutBoxFromImageAndResize2; exports2.cutBoxFromImageAndResize = cutBoxFromImageAndResize2;
function enlargeBox2(box, factor = 1.5) { function enlargeBox2(box, factor = 1.5) {
@ -422,7 +420,6 @@ var require_util = __commonJS((exports2) => {
// src/face/facepipeline.js // src/face/facepipeline.js
var require_facepipeline = __commonJS((exports2) => { var require_facepipeline = __commonJS((exports2) => {
const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
const bounding = __toModule(require_box()); const bounding = __toModule(require_box());
const keypoints = __toModule(require_keypoints()); const keypoints = __toModule(require_keypoints());
const util = __toModule(require_util()); const util = __toModule(require_util());
@ -501,14 +498,14 @@ var require_facepipeline = __commonJS((exports2) => {
getEyeBox(rawCoords, face2, eyeInnerCornerIndex, eyeOuterCornerIndex, flip = false) { getEyeBox(rawCoords, face2, eyeInnerCornerIndex, eyeOuterCornerIndex, flip = false) {
const box = bounding.squarifyBox(bounding.enlargeBox(this.calculateLandmarksBoundingBox([rawCoords[eyeInnerCornerIndex], rawCoords[eyeOuterCornerIndex]]), this.irisEnlarge)); const box = bounding.squarifyBox(bounding.enlargeBox(this.calculateLandmarksBoundingBox([rawCoords[eyeInnerCornerIndex], rawCoords[eyeOuterCornerIndex]]), this.irisEnlarge));
const boxSize = bounding.getBoxSize(box); const boxSize = bounding.getBoxSize(box);
let crop = tf3.image.cropAndResize(face2, [[ let crop = tf.image.cropAndResize(face2, [[
box.startPoint[1] / this.meshHeight, box.startPoint[1] / this.meshHeight,
box.startPoint[0] / this.meshWidth, box.startPoint[0] / this.meshWidth,
box.endPoint[1] / this.meshHeight, box.endPoint[1] / this.meshHeight,
box.endPoint[0] / this.meshWidth box.endPoint[0] / this.meshWidth
]], [0], [this.irisSize, this.irisSize]); ]], [0], [this.irisSize, this.irisSize]);
if (flip) { if (flip) {
crop = tf3.image.flipLeftRight(crop); crop = tf.image.flipLeftRight(crop);
} }
return {box, boxSize, crop}; return {box, boxSize, crop};
} }
@ -580,7 +577,7 @@ var require_facepipeline = __commonJS((exports2) => {
prediction.landmarks.dispose(); prediction.landmarks.dispose();
}); });
} }
let results = tf3.tidy(() => this.storedBoxes.map((box, i) => { let results = tf.tidy(() => this.storedBoxes.map((box, i) => {
let angle = 0; let angle = 0;
const boxLandmarksFromMeshModel = box.landmarks.length >= LANDMARKS_COUNT; const boxLandmarksFromMeshModel = box.landmarks.length >= LANDMARKS_COUNT;
let [indexOfMouth, indexOfForehead] = MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES; let [indexOfMouth, indexOfForehead] = MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES;
@ -593,7 +590,7 @@ var require_facepipeline = __commonJS((exports2) => {
let rotatedImage = input; let rotatedImage = input;
let rotationMatrix = util.IDENTITY_MATRIX; let rotationMatrix = util.IDENTITY_MATRIX;
if (angle !== 0) { if (angle !== 0) {
rotatedImage = tf3.image.rotateWithOffset(input, angle, 0, faceCenterNormalized); rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
rotationMatrix = util.buildRotationMatrix(-angle, faceCenter); rotationMatrix = util.buildRotationMatrix(-angle, faceCenter);
} }
const boxCPU = {startPoint: box.startPoint, endPoint: box.endPoint}; const boxCPU = {startPoint: box.startPoint, endPoint: box.endPoint};
@ -615,12 +612,12 @@ var require_facepipeline = __commonJS((exports2) => {
coords.dispose(); coords.dispose();
return null; return null;
} }
const coordsReshaped = tf3.reshape(coords, [-1, 3]); const coordsReshaped = tf.reshape(coords, [-1, 3]);
let rawCoords = coordsReshaped.arraySync(); let rawCoords = coordsReshaped.arraySync();
if (config2.iris.enabled) { if (config2.iris.enabled) {
const {box: leftEyeBox, boxSize: leftEyeBoxSize, crop: leftEyeCrop} = this.getEyeBox(rawCoords, face2, LEFT_EYE_BOUNDS[0], LEFT_EYE_BOUNDS[1], true); const {box: leftEyeBox, boxSize: leftEyeBoxSize, crop: leftEyeCrop} = this.getEyeBox(rawCoords, face2, LEFT_EYE_BOUNDS[0], LEFT_EYE_BOUNDS[1], true);
const {box: rightEyeBox, boxSize: rightEyeBoxSize, crop: rightEyeCrop} = this.getEyeBox(rawCoords, face2, RIGHT_EYE_BOUNDS[0], RIGHT_EYE_BOUNDS[1]); const {box: rightEyeBox, boxSize: rightEyeBoxSize, crop: rightEyeCrop} = this.getEyeBox(rawCoords, face2, RIGHT_EYE_BOUNDS[0], RIGHT_EYE_BOUNDS[1]);
const eyePredictions = this.irisModel.predict(tf3.concat([leftEyeCrop, rightEyeCrop])); const eyePredictions = this.irisModel.predict(tf.concat([leftEyeCrop, rightEyeCrop]));
const eyePredictionsData = eyePredictions.dataSync(); const eyePredictionsData = eyePredictions.dataSync();
eyePredictions.dispose(); eyePredictions.dispose();
const leftEyeData = eyePredictionsData.slice(0, IRIS_NUM_COORDINATES * 3); const leftEyeData = eyePredictionsData.slice(0, IRIS_NUM_COORDINATES * 3);
@ -641,9 +638,9 @@ var require_facepipeline = __commonJS((exports2) => {
rawCoords = rawCoords.concat(adjustedLeftIrisCoords).concat(adjustedRightIrisCoords); rawCoords = rawCoords.concat(adjustedLeftIrisCoords).concat(adjustedRightIrisCoords);
} }
const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix); const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
tf3.dispose(rawCoords); tf.dispose(rawCoords);
const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData)); const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));
const transformedCoords = tf3.tensor2d(transformedCoordsData); const transformedCoords = tf.tensor2d(transformedCoordsData);
const prediction = { const prediction = {
coords: transformedCoords, coords: transformedCoords,
box: landmarksBox, box: landmarksBox,
@ -1145,7 +1142,6 @@ var require_uvcoords = __commonJS((exports2) => {
// src/face/facemesh.js // src/face/facemesh.js
var require_facemesh = __commonJS((exports2) => { var require_facemesh = __commonJS((exports2) => {
const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
const blazeface = __toModule(require_blazeface()); const blazeface = __toModule(require_blazeface());
const keypoints = __toModule(require_keypoints()); const keypoints = __toModule(require_keypoints());
const pipe = __toModule(require_facepipeline()); const pipe = __toModule(require_facepipeline());
@ -1178,7 +1174,7 @@ var require_facemesh = __commonJS((exports2) => {
box: prediction.box ? [prediction.box.startPoint[0], prediction.box.startPoint[1], prediction.box.endPoint[0] - prediction.box.startPoint[0], prediction.box.endPoint[1] - prediction.box.startPoint[1]] : 0, box: prediction.box ? [prediction.box.startPoint[0], prediction.box.startPoint[1], prediction.box.endPoint[0] - prediction.box.startPoint[0], prediction.box.endPoint[1] - prediction.box.startPoint[1]] : 0,
mesh, mesh,
annotations, annotations,
image: prediction.image ? tf3.clone(prediction.image) : null image: prediction.image ? tf.clone(prediction.image) : null
}); });
if (prediction.coords) if (prediction.coords)
prediction.coords.dispose(); prediction.coords.dispose();
@ -1191,8 +1187,8 @@ var require_facemesh = __commonJS((exports2) => {
async function load(config2) { async function load(config2) {
const models = await Promise.all([ const models = await Promise.all([
blazeface.load(config2), blazeface.load(config2),
tf3.loadGraphModel(config2.mesh.modelPath, {fromTFHub: config2.mesh.modelPath.includes("tfhub.dev")}), loadGraphModel(config2.mesh.modelPath, {fromTFHub: config2.mesh.modelPath.includes("tfhub.dev")}),
tf3.loadGraphModel(config2.iris.modelPath, {fromTFHub: config2.iris.modelPath.includes("tfhub.dev")}) loadGraphModel(config2.iris.modelPath, {fromTFHub: config2.iris.modelPath.includes("tfhub.dev")})
]); ]);
const faceMesh = new MediaPipeFaceMesh(models[0], models[1], models[2], config2); const faceMesh = new MediaPipeFaceMesh(models[0], models[1], models[2], config2);
console.log(`Human: load model: ${config2.mesh.modelPath.match(/\/(.*)\./)[1]}`); console.log(`Human: load model: ${config2.mesh.modelPath.match(/\/(.*)\./)[1]}`);
@ -1234,7 +1230,6 @@ var require_profile = __commonJS((exports2) => {
// src/age/age.js // src/age/age.js
var require_age = __commonJS((exports2) => { var require_age = __commonJS((exports2) => {
const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
const profile2 = __toModule(require_profile()); const profile2 = __toModule(require_profile());
const models = {}; const models = {};
let last = {age: 0}; let last = {age: 0};
@ -1242,12 +1237,12 @@ var require_age = __commonJS((exports2) => {
const zoom = [0, 0]; const zoom = [0, 0];
async function load(config2) { async function load(config2) {
if (!models.age) { if (!models.age) {
models.age = await tf3.loadGraphModel(config2.face.age.modelPath); models.age = await loadGraphModel(config2.face.age.modelPath);
console.log(`Human: load model: ${config2.face.age.modelPath.match(/\/(.*)\./)[1]}`); console.log(`Human: load model: ${config2.face.age.modelPath.match(/\/(.*)\./)[1]}`);
} }
return models.age; return models.age;
} }
async function predict(image4, config2) { async function predict(image2, config2) {
if (frame < config2.face.age.skipFrames && last.age && last.age > 0) { if (frame < config2.face.age.skipFrames && last.age && last.age > 0) {
frame += 1; frame += 1;
return last; return last;
@ -1255,21 +1250,21 @@ var require_age = __commonJS((exports2) => {
frame = 0; frame = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const box = [[ const box = [[
image4.shape[1] * zoom[0] / image4.shape[1], image2.shape[1] * zoom[0] / image2.shape[1],
image4.shape[2] * zoom[1] / image4.shape[2], image2.shape[2] * zoom[1] / image2.shape[2],
(image4.shape[1] - image4.shape[1] * zoom[0]) / image4.shape[1], (image2.shape[1] - image2.shape[1] * zoom[0]) / image2.shape[1],
(image4.shape[2] - image4.shape[2] * zoom[1]) / image4.shape[2] (image2.shape[2] - image2.shape[2] * zoom[1]) / image2.shape[2]
]]; ]];
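// with zoom = [0, 0] the shape terms cancel (shape * zoom / shape = zoom), so this box
// is simply [zoom0, zoom1, 1 - zoom0, 1 - zoom1] in the normalized [y1, x1, y2, x2]
// form that cropAndResize expects; by default it crops the full frame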
const resize = tf3.image.cropAndResize(image4, box, [0], [config2.face.age.inputSize, config2.face.age.inputSize]); const resize = tf.image.cropAndResize(image2, box, [0], [config2.face.age.inputSize, config2.face.age.inputSize]);
const enhance = tf3.mul(resize, [255]); const enhance = tf.mul(resize, [255]);
tf3.dispose(resize); tf.dispose(resize);
let ageT; let ageT;
const obj = {}; const obj = {};
if (!config2.profile) { if (!config2.profile) {
if (config2.face.age.enabled) if (config2.face.age.enabled)
ageT = await models.age.predict(enhance); ageT = await models.age.predict(enhance);
} else { } else {
const profileAge = config2.face.age.enabled ? await tf3.profile(() => models.age.predict(enhance)) : {}; const profileAge = config2.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};
ageT = profileAge.result.clone(); ageT = profileAge.result.clone();
profileAge.result.dispose(); profileAge.result.dispose();
profile2.run("age", profileAge); profile2.run("age", profileAge);
@ -1290,7 +1285,6 @@ var require_age = __commonJS((exports2) => {
// src/gender/gender.js // src/gender/gender.js
var require_gender = __commonJS((exports2) => { var require_gender = __commonJS((exports2) => {
const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
const profile2 = __toModule(require_profile()); const profile2 = __toModule(require_profile());
const models = {}; const models = {};
let last = {gender: ""}; let last = {gender: ""};
@ -1300,13 +1294,13 @@ var require_gender = __commonJS((exports2) => {
const rgb = [0.2989, 0.587, 0.114]; const rgb = [0.2989, 0.587, 0.114];
async function load(config2) { async function load(config2) {
if (!models.gender) { if (!models.gender) {
models.gender = await tf3.loadGraphModel(config2.face.gender.modelPath); models.gender = await loadGraphModel(config2.face.gender.modelPath);
alternative = models.gender.inputs[0].shape[3] === 1; alternative = models.gender.inputs[0].shape[3] === 1;
console.log(`Human: load model: ${config2.face.gender.modelPath.match(/\/(.*)\./)[1]}`); console.log(`Human: load model: ${config2.face.gender.modelPath.match(/\/(.*)\./)[1]}`);
} }
return models.gender; return models.gender;
} }
async function predict(image4, config2) { async function predict(image2, config2) {
if (frame < config2.face.gender.skipFrames && last.gender !== "") { if (frame < config2.face.gender.skipFrames && last.gender !== "") {
frame += 1; frame += 1;
return last; return last;
@ -1314,33 +1308,33 @@ var require_gender = __commonJS((exports2) => {
frame = 0; frame = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const box = [[ const box = [[
image4.shape[1] * zoom[0] / image4.shape[1], image2.shape[1] * zoom[0] / image2.shape[1],
image4.shape[2] * zoom[1] / image4.shape[2], image2.shape[2] * zoom[1] / image2.shape[2],
(image4.shape[1] - image4.shape[1] * zoom[0]) / image4.shape[1], (image2.shape[1] - image2.shape[1] * zoom[0]) / image2.shape[1],
(image4.shape[2] - image4.shape[2] * zoom[1]) / image4.shape[2] (image2.shape[2] - image2.shape[2] * zoom[1]) / image2.shape[2]
]]; ]];
const resize = tf3.image.cropAndResize(image4, box, [0], [config2.face.gender.inputSize, config2.face.gender.inputSize]); const resize = tf.image.cropAndResize(image2, box, [0], [config2.face.gender.inputSize, config2.face.gender.inputSize]);
let enhance; let enhance;
if (alternative) { if (alternative) {
enhance = tf3.tidy(() => { enhance = tf.tidy(() => {
const [red, green, blue] = tf3.split(resize, 3, 3); const [red, green, blue] = tf.split(resize, 3, 3);
const redNorm = tf3.mul(red, rgb[0]); const redNorm = tf.mul(red, rgb[0]);
const greenNorm = tf3.mul(green, rgb[1]); const greenNorm = tf.mul(green, rgb[1]);
const blueNorm = tf3.mul(blue, rgb[2]); const blueNorm = tf.mul(blue, rgb[2]);
const grayscale = tf3.addN([redNorm, greenNorm, blueNorm]); const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
return grayscale.sub(0.5).mul(2); return grayscale.sub(0.5).mul(2);
}); });
} else { } else {
enhance = tf3.mul(resize, [255]); enhance = tf.mul(resize, [255]);
} }
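// the 'alternative' branch above is a Rec.601 luma conversion for single-channel
// models (inputs[0].shape[3] === 1): Y = 0.2989*R + 0.587*G + 0.114*B, then
// (Y - 0.5) * 2 rescales [0, 1] to [-1, 1]; e.g. mid-gray (0.5, 0.5, 0.5)
// gives Y ≈ 0.5, which normalizes to ≈ 0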
tf3.dispose(resize); tf.dispose(resize);
let genderT; let genderT;
const obj = {}; const obj = {};
if (!config2.profile) { if (!config2.profile) {
if (config2.face.gender.enabled) if (config2.face.gender.enabled)
genderT = await models.gender.predict(enhance); genderT = await models.gender.predict(enhance);
} else { } else {
const profileGender = config2.face.gender.enabled ? await tf3.profile(() => models.gender.predict(enhance)) : {}; const profileGender = config2.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};
genderT = profileGender.result.clone(); genderT = profileGender.result.clone();
profileGender.result.dispose(); profileGender.result.dispose();
profile2.run("gender", profileGender); profile2.run("gender", profileGender);
@ -1373,7 +1367,6 @@ var require_gender = __commonJS((exports2) => {
// src/emotion/emotion.js // src/emotion/emotion.js
var require_emotion = __commonJS((exports2) => { var require_emotion = __commonJS((exports2) => {
const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
const profile2 = __toModule(require_profile()); const profile2 = __toModule(require_profile());
const annotations = ["angry", "disgust", "fear", "happy", "sad", "surpise", "neutral"]; const annotations = ["angry", "disgust", "fear", "happy", "sad", "surpise", "neutral"];
const models = {}; const models = {};
@ -1384,12 +1377,12 @@ var require_emotion = __commonJS((exports2) => {
const scale = 1; const scale = 1;
async function load(config2) { async function load(config2) {
if (!models.emotion) { if (!models.emotion) {
models.emotion = await tf3.loadGraphModel(config2.face.emotion.modelPath); models.emotion = await loadGraphModel(config2.face.emotion.modelPath);
console.log(`Human: load model: ${config2.face.emotion.modelPath.match(/\/(.*)\./)[1]}`); console.log(`Human: load model: ${config2.face.emotion.modelPath.match(/\/(.*)\./)[1]}`);
} }
return models.emotion; return models.emotion;
} }
async function predict(image4, config2) { async function predict(image2, config2) {
if (frame < config2.face.emotion.skipFrames && last.length > 0) { if (frame < config2.face.emotion.skipFrames && last.length > 0) {
frame += 1; frame += 1;
return last; return last;
@ -1397,25 +1390,25 @@ var require_emotion = __commonJS((exports2) => {
frame = 0; frame = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const box = [[ const box = [[
image4.shape[1] * zoom[0] / image4.shape[1], image2.shape[1] * zoom[0] / image2.shape[1],
image4.shape[2] * zoom[1] / image4.shape[2], image2.shape[2] * zoom[1] / image2.shape[2],
(image4.shape[1] - image4.shape[1] * zoom[0]) / image4.shape[1], (image2.shape[1] - image2.shape[1] * zoom[0]) / image2.shape[1],
(image4.shape[2] - image4.shape[2] * zoom[1]) / image4.shape[2] (image2.shape[2] - image2.shape[2] * zoom[1]) / image2.shape[2]
]]; ]];
const resize = tf3.image.cropAndResize(image4, box, [0], [config2.face.emotion.inputSize, config2.face.emotion.inputSize]); const resize = tf.image.cropAndResize(image2, box, [0], [config2.face.emotion.inputSize, config2.face.emotion.inputSize]);
const [red, green, blue] = tf3.split(resize, 3, 3); const [red, green, blue] = tf.split(resize, 3, 3);
resize.dispose(); resize.dispose();
const redNorm = tf3.mul(red, rgb[0]); const redNorm = tf.mul(red, rgb[0]);
const greenNorm = tf3.mul(green, rgb[1]); const greenNorm = tf.mul(green, rgb[1]);
const blueNorm = tf3.mul(blue, rgb[2]); const blueNorm = tf.mul(blue, rgb[2]);
red.dispose(); red.dispose();
green.dispose(); green.dispose();
blue.dispose(); blue.dispose();
const grayscale = tf3.addN([redNorm, greenNorm, blueNorm]); const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
redNorm.dispose(); redNorm.dispose();
greenNorm.dispose(); greenNorm.dispose();
blueNorm.dispose(); blueNorm.dispose();
const normalize = tf3.tidy(() => grayscale.sub(0.5).mul(2)); const normalize = tf.tidy(() => grayscale.sub(0.5).mul(2));
grayscale.dispose(); grayscale.dispose();
const obj = []; const obj = [];
if (config2.face.emotion.enabled) { if (config2.face.emotion.enabled) {
@ -1423,9 +1416,9 @@ var require_emotion = __commonJS((exports2) => {
if (!config2.profile) { if (!config2.profile) {
const emotionT = await models.emotion.predict(normalize); const emotionT = await models.emotion.predict(normalize);
data2 = emotionT.dataSync(); data2 = emotionT.dataSync();
tf3.dispose(emotionT); tf.dispose(emotionT);
} else { } else {
const profileData = await tf3.profile(() => models.emotion.predict(normalize)); const profileData = await tf.profile(() => models.emotion.predict(normalize));
data2 = profileData.result.dataSync(); data2 = profileData.result.dataSync();
profileData.result.dispose(); profileData.result.dispose();
profile2.run("emotion", profileData); profile2.run("emotion", profileData);
@ -1447,14 +1440,13 @@ var require_emotion = __commonJS((exports2) => {
// src/body/modelBase.js // src/body/modelBase.js
var require_modelBase = __commonJS((exports2) => { var require_modelBase = __commonJS((exports2) => {
const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
class BaseModel { class BaseModel {
constructor(model, outputStride) { constructor(model, outputStride) {
this.model = model; this.model = model;
this.outputStride = outputStride; this.outputStride = outputStride;
} }
predict(input) { predict(input) {
return tf3.tidy(() => { return tf.tidy(() => {
const asFloat = this.preprocessInput(input.toFloat()); const asFloat = this.preprocessInput(input.toFloat());
const asBatch = asFloat.expandDims(0); const asBatch = asFloat.expandDims(0);
const results = this.model.predict(asBatch); const results = this.model.predict(asBatch);
@ -1477,11 +1469,10 @@ var require_modelBase = __commonJS((exports2) => {
// src/body/modelMobileNet.js // src/body/modelMobileNet.js
var require_modelMobileNet = __commonJS((exports2) => { var require_modelMobileNet = __commonJS((exports2) => {
const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
const modelBase = __toModule(require_modelBase()); const modelBase = __toModule(require_modelBase());
class MobileNet extends modelBase.BaseModel { class MobileNet extends modelBase.BaseModel {
preprocessInput(input) { preprocessInput(input) {
return tf3.tidy(() => tf3.div(input, 127.5).sub(1)); return tf.tidy(() => tf.div(input, 127.5).sub(1));
} }
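// div(input, 127.5).sub(1) maps 8-bit pixel values [0, 255] onto [-1, 1],
// the input range the MobileNet-based PoseNet model is trained with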
nameOutputResults(results) { nameOutputResults(results) {
const [offsets, heatmap, displacementFwd, displacementBwd] = results; const [offsets, heatmap, displacementFwd, displacementBwd] = results;
@ -1896,8 +1887,8 @@ var require_util2 = __commonJS((exports2) => {
}; };
} }
exports2.scalePose = scalePose; exports2.scalePose = scalePose;
function resizeTo(image3, [targetH, targetW]) { function resizeTo(image2, [targetH, targetW]) {
const input = image3.squeeze(0); const input = image2.squeeze(0);
const resized = input.resizeBilinear([targetH, targetW]); const resized = input.resizeBilinear([targetH, targetW]);
input.dispose(); input.dispose();
return resized; return resized;
@ -1912,7 +1903,6 @@ var require_util2 = __commonJS((exports2) => {
// src/body/modelPoseNet.js // src/body/modelPoseNet.js
var require_modelPoseNet = __commonJS((exports2) => { var require_modelPoseNet = __commonJS((exports2) => {
const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
const modelMobileNet = __toModule(require_modelMobileNet()); const modelMobileNet = __toModule(require_modelMobileNet());
const decodeMultiple = __toModule(require_decodeMultiple()); const decodeMultiple = __toModule(require_decodeMultiple());
const util = __toModule(require_util2()); const util = __toModule(require_util2());
@ -1948,7 +1938,7 @@ var require_modelPoseNet = __commonJS((exports2) => {
} }
exports2.PoseNet = PoseNet; exports2.PoseNet = PoseNet;
async function load(config2) { async function load(config2) {
const graphModel = await tf3.loadGraphModel(config2.body.modelPath); const graphModel = await loadGraphModel(config2.body.modelPath);
const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride); const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride);
console.log(`Human: load model: ${config2.body.modelPath.match(/\/(.*)\./)[1]}`); console.log(`Human: load model: ${config2.body.modelPath.match(/\/(.*)\./)[1]}`);
return new PoseNet(mobilenet); return new PoseNet(mobilenet);
@ -1980,7 +1970,6 @@ var require_posenet = __commonJS((exports2) => {
// src/hand/handdetector.js // src/hand/handdetector.js
var require_handdetector = __commonJS((exports2) => { var require_handdetector = __commonJS((exports2) => {
const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
/** /**
* @license * @license
* Copyright 2020 Google LLC. All Rights Reserved. * Copyright 2020 Google LLC. All Rights Reserved.
@ -2001,46 +1990,46 @@ var require_handdetector = __commonJS((exports2) => {
constructor(model, inputSize, anchorsAnnotated) { constructor(model, inputSize, anchorsAnnotated) {
this.model = model; this.model = model;
this.anchors = anchorsAnnotated.map((anchor) => [anchor.x_center, anchor.y_center]); this.anchors = anchorsAnnotated.map((anchor) => [anchor.x_center, anchor.y_center]);
this.anchorsTensor = tf3.tensor2d(this.anchors); this.anchorsTensor = tf.tensor2d(this.anchors);
this.inputSizeTensor = tf3.tensor1d([inputSize, inputSize]); this.inputSizeTensor = tf.tensor1d([inputSize, inputSize]);
this.doubleInputSizeTensor = tf3.tensor1d([inputSize * 2, inputSize * 2]); this.doubleInputSizeTensor = tf.tensor1d([inputSize * 2, inputSize * 2]);
} }
normalizeBoxes(boxes) { normalizeBoxes(boxes) {
return tf3.tidy(() => { return tf.tidy(() => {
const boxOffsets = tf3.slice(boxes, [0, 0], [-1, 2]); const boxOffsets = tf.slice(boxes, [0, 0], [-1, 2]);
const boxSizes = tf3.slice(boxes, [0, 2], [-1, 2]); const boxSizes = tf.slice(boxes, [0, 2], [-1, 2]);
const boxCenterPoints = tf3.add(tf3.div(boxOffsets, this.inputSizeTensor), this.anchorsTensor); const boxCenterPoints = tf.add(tf.div(boxOffsets, this.inputSizeTensor), this.anchorsTensor);
const halfBoxSizes = tf3.div(boxSizes, this.doubleInputSizeTensor); const halfBoxSizes = tf.div(boxSizes, this.doubleInputSizeTensor);
const startPoints = tf3.mul(tf3.sub(boxCenterPoints, halfBoxSizes), this.inputSizeTensor); const startPoints = tf.mul(tf.sub(boxCenterPoints, halfBoxSizes), this.inputSizeTensor);
const endPoints = tf3.mul(tf3.add(boxCenterPoints, halfBoxSizes), this.inputSizeTensor); const endPoints = tf.mul(tf.add(boxCenterPoints, halfBoxSizes), this.inputSizeTensor);
return tf3.concat2d([startPoints, endPoints], 1); return tf.concat2d([startPoints, endPoints], 1);
}); });
} }
normalizeLandmarks(rawPalmLandmarks, index) { normalizeLandmarks(rawPalmLandmarks, index) {
return tf3.tidy(() => { return tf.tidy(() => {
const landmarks = tf3.add(tf3.div(rawPalmLandmarks.reshape([-1, 7, 2]), this.inputSizeTensor), this.anchors[index]); const landmarks = tf.add(tf.div(rawPalmLandmarks.reshape([-1, 7, 2]), this.inputSizeTensor), this.anchors[index]);
return tf3.mul(landmarks, this.inputSizeTensor); return tf.mul(landmarks, this.inputSizeTensor);
}); });
} }
async getBoxes(input, config2) { async getBoxes(input, config2) {
const batched = this.model.predict(input); const batched = this.model.predict(input);
const predictions = batched.squeeze(); const predictions = batched.squeeze();
batched.dispose(); batched.dispose();
const scores = tf3.tidy(() => tf3.sigmoid(tf3.slice(predictions, [0, 0], [-1, 1])).squeeze()); const scores = tf.tidy(() => tf.sigmoid(tf.slice(predictions, [0, 0], [-1, 1])).squeeze());
const scoresVal = scores.dataSync(); const scoresVal = scores.dataSync();
const rawBoxes = tf3.slice(predictions, [0, 1], [-1, 4]); const rawBoxes = tf.slice(predictions, [0, 1], [-1, 4]);
const boxes = this.normalizeBoxes(rawBoxes); const boxes = this.normalizeBoxes(rawBoxes);
rawBoxes.dispose(); rawBoxes.dispose();
const filteredT = await tf3.image.nonMaxSuppressionAsync(boxes, scores, config2.maxHands, config2.iouThreshold, config2.scoreThreshold); const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config2.maxHands, config2.iouThreshold, config2.scoreThreshold);
const filtered = filteredT.arraySync(); const filtered = filteredT.arraySync();
scores.dispose(); scores.dispose();
filteredT.dispose(); filteredT.dispose();
const hands = []; const hands = [];
for (const boxIndex of filtered) { for (const boxIndex of filtered) {
if (scoresVal[boxIndex] >= config2.minConfidence) { if (scoresVal[boxIndex] >= config2.minConfidence) {
const matchingBox = tf3.slice(boxes, [boxIndex, 0], [1, -1]); const matchingBox = tf.slice(boxes, [boxIndex, 0], [1, -1]);
const rawPalmLandmarks = tf3.slice(predictions, [boxIndex, 5], [1, 14]); const rawPalmLandmarks = tf.slice(predictions, [boxIndex, 5], [1, 14]);
const palmLandmarks = tf3.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2])); const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));
rawPalmLandmarks.dispose(); rawPalmLandmarks.dispose();
hands.push({box: matchingBox, palmLandmarks, confidence: scoresVal[boxIndex]}); hands.push({box: matchingBox, palmLandmarks, confidence: scoresVal[boxIndex]});
} }
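
normalizeBoxes() above is plain anchor arithmetic: the model predicts a per-anchor center offset and a size, and everything is scaled back to input pixels. In scalar form, with illustrative values and inputSize = 256:

// scalar version of normalizeBoxes for a single box (values are made up)
const anchor = [0.5, 0.5];       // normalized anchor center
const offset = [8, -4];          // raw model output, pixels relative to the anchor
const size = [96, 128];          // raw predicted width/height in pixels
const center = [offset[0] / 256 + anchor[0], offset[1] / 256 + anchor[1]];
const half = [size[0] / 512, size[1] / 512];              // size / (2 * inputSize)
const startPoint = [(center[0] - half[0]) * 256, (center[1] - half[1]) * 256];
const endPoint = [(center[0] + half[0]) * 256, (center[1] + half[1]) * 256];
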
@ -2052,9 +2041,9 @@ var require_handdetector = __commonJS((exports2) => {
async estimateHandBounds(input, config2) { async estimateHandBounds(input, config2) {
const inputHeight = input.shape[1]; const inputHeight = input.shape[1];
const inputWidth = input.shape[2]; const inputWidth = input.shape[2];
const image4 = tf3.tidy(() => input.resizeBilinear([config2.inputSize, config2.inputSize]).div(127.5).sub(1)); const image2 = tf.tidy(() => input.resizeBilinear([config2.inputSize, config2.inputSize]).div(127.5).sub(1));
const predictions = await this.getBoxes(image4, config2); const predictions = await this.getBoxes(image2, config2);
image4.dispose(); image2.dispose();
if (!predictions || predictions.length === 0) if (!predictions || predictions.length === 0)
return null; return null;
const hands = []; const hands = [];
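
getBoxes() above uses the async variant of non-max suppression, so the WebGL backend is not forced into a blocking readback mid-frame. Isolated, the filtering step looks like this (sketch; config field names as in the diff):

async function filterHands(boxes, scores, config) {
  // async NMS: awaits the GPU result instead of a synchronous download
  const keepT = await tf.image.nonMaxSuppressionAsync(
    boxes, scores, config.maxHands, config.iouThreshold, config.scoreThreshold);
  const keep = keepT.arraySync();
  keepT.dispose();
  return keep; // indices of the surviving boxes
}
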
@ -2075,7 +2064,6 @@ var require_handdetector = __commonJS((exports2) => {
// src/hand/handpipeline.js // src/hand/handpipeline.js
var require_handpipeline = __commonJS((exports2) => { var require_handpipeline = __commonJS((exports2) => {
const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
/** /**
* @license * @license
* Copyright 2020 Google LLC. All Rights Reserved. * Copyright 2020 Google LLC. All Rights Reserved.
@ -2151,13 +2139,13 @@ var require_handpipeline = __commonJS((exports2) => {
coord[2] coord[2]
]); ]);
} }
async estimateHands(image4, config2) { async estimateHands(image2, config2) {
this.skipped++; this.skipped++;
let useFreshBox = false; let useFreshBox = false;
let boxes; let boxes;
if (this.skipped > config2.skipFrames || !config2.landmarks) { if (this.skipped > config2.skipFrames || !config2.landmarks) {
boxes = await this.boxDetector.estimateHandBounds(image4, config2); boxes = await this.boxDetector.estimateHandBounds(image2, config2);
if (image4.shape[1] !== 255 && image4.shape[2] !== 255) if (image2.shape[1] !== 255 && image2.shape[2] !== 255)
this.skipped = 0; this.skipped = 0;
} }
if (boxes && boxes.length > 0 && (boxes.length !== this.detectedHands && this.detectedHands !== config2.maxHands || !config2.landmarks)) { if (boxes && boxes.length > 0 && (boxes.length !== this.detectedHands && this.detectedHands !== config2.maxHands || !config2.landmarks)) {
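
The skipped counter above is a cheap temporal cache: the full box detector runs only every config.skipFrames frames (with a carve-out for the fixed 255px input), and tracked boxes are reused in between. Simplified to its core, assuming a detector exposing estimateHandBounds():

let skipped = 0;
let cachedBoxes = null;

async function boxesFor(image, detector, config) {
  skipped++;
  if (!cachedBoxes || skipped > config.skipFrames) {
    cachedBoxes = await detector.estimateHandBounds(image, config); // full detector pass
    skipped = 0;
  }
  return cachedBoxes; // frames in between reuse the tracked boxes
}
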
@ -2176,8 +2164,8 @@ var require_handpipeline = __commonJS((exports2) => {
if (config2.landmarks) { if (config2.landmarks) {
const angle = computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]); const angle = computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
const palmCenter = getBoxCenter(currentBox); const palmCenter = getBoxCenter(currentBox);
const palmCenterNormalized = [palmCenter[0] / image4.shape[2], palmCenter[1] / image4.shape[1]]; const palmCenterNormalized = [palmCenter[0] / image2.shape[2], palmCenter[1] / image2.shape[1]];
const rotatedImage = tf3.image.rotateWithOffset(image4, angle, 0, palmCenterNormalized); const rotatedImage = tf.image.rotateWithOffset(image2, angle, 0, palmCenterNormalized);
const rotationMatrix = buildRotationMatrix(-angle, palmCenter); const rotationMatrix = buildRotationMatrix(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox; const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = cutBoxFromImageAndResize(newBox, rotatedImage, [this.inputSize, this.inputSize]); const croppedInput = cutBoxFromImageAndResize(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@ -2189,7 +2177,7 @@ var require_handpipeline = __commonJS((exports2) => {
const confidenceValue = confidence.dataSync()[0]; const confidenceValue = confidence.dataSync()[0];
confidence.dispose(); confidence.dispose();
if (confidenceValue >= config2.minConfidence) { if (confidenceValue >= config2.minConfidence) {
const keypointsReshaped = tf3.reshape(keypoints, [-1, 3]); const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
const rawCoords = keypointsReshaped.arraySync(); const rawCoords = keypointsReshaped.arraySync();
keypoints.dispose(); keypoints.dispose();
keypointsReshaped.dispose(); keypointsReshaped.dispose();
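
The rotation step above straightens the hand before the landmark model runs: the crop is counter-rotated so the palm-base to middle-finger-base segment points up. The angle helper itself is not shown in this diff; a sketch consistent with its call shape:

// angle (radians) that rotates the segment p1->p2 to point straight up,
// wrapped to [-pi, pi); assumed implementation, not taken from this diff
function computeRotation(p1, p2) {
  const radians = Math.PI / 2 - Math.atan2(-(p2[1] - p1[1]), p2[0] - p1[0]);
  return radians - 2 * Math.PI * Math.floor((radians + Math.PI) / (2 * Math.PI));
}
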
@ -19908,7 +19896,6 @@ var require_anchors = __commonJS((exports2) => {
// src/hand/handpose.js // src/hand/handpose.js
var require_handpose = __commonJS((exports2) => { var require_handpose = __commonJS((exports2) => {
const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
const handdetector = __toModule(require_handdetector()); const handdetector = __toModule(require_handdetector());
const pipeline = __toModule(require_handpipeline()); const pipeline = __toModule(require_handpipeline());
const anchors = __toModule(require_anchors()); const anchors = __toModule(require_anchors());
@ -19973,8 +19960,8 @@ var require_handpose = __commonJS((exports2) => {
exports2.HandPose = HandPose; exports2.HandPose = HandPose;
async function load(config2) { async function load(config2) {
const [handDetectorModel, handPoseModel] = await Promise.all([ const [handDetectorModel, handPoseModel] = await Promise.all([
tf3.loadGraphModel(config2.detector.modelPath, {fromTFHub: config2.detector.modelPath.includes("tfhub.dev")}), loadGraphModel(config2.detector.modelPath, {fromTFHub: config2.detector.modelPath.includes("tfhub.dev")}),
tf3.loadGraphModel(config2.skeleton.modelPath, {fromTFHub: config2.skeleton.modelPath.includes("tfhub.dev")}) loadGraphModel(config2.skeleton.modelPath, {fromTFHub: config2.skeleton.modelPath.includes("tfhub.dev")})
]); ]);
const detector = new handdetector.HandDetector(handDetectorModel, config2.inputSize, anchors.anchors); const detector = new handdetector.HandDetector(handDetectorModel, config2.inputSize, anchors.anchors);
const pipe = new pipeline.HandPipeline(detector, handPoseModel, config2.inputSize); const pipe = new pipeline.HandPipeline(detector, handPoseModel, config2.inputSize);
@ -20123,8 +20110,8 @@ var require_imagefx = __commonJS((exports2) => {
this.reset = function() { this.reset = function() {
_filterChain = []; _filterChain = [];
}; };
this.apply = function(image3) { this.apply = function(image2) {
_resize(image3.width, image3.height); _resize(image2.width, image2.height);
_drawCount = 0; _drawCount = 0;
if (!_sourceTexture) if (!_sourceTexture)
_sourceTexture = gl.createTexture(); _sourceTexture = gl.createTexture();
@ -20133,7 +20120,7 @@ var require_imagefx = __commonJS((exports2) => {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image3); gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image2);
if (_filterChain.length === 0) { if (_filterChain.length === 0) {
_draw(); _draw();
return _canvas; return _canvas;
@ -20770,14 +20757,13 @@ var require_imagefx = __commonJS((exports2) => {
// src/image.js // src/image.js
var require_image = __commonJS((exports2) => { var require_image = __commonJS((exports2) => {
const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
const fxImage = __toModule(require_imagefx()); const fxImage = __toModule(require_imagefx());
let inCanvas = null; let inCanvas = null;
let outCanvas = null; let outCanvas = null;
function process3(input, config2) { function process3(input, config2) {
let tensor; let tensor;
if (input instanceof tf3.Tensor) { if (input instanceof tf.Tensor) {
tensor = tf3.clone(input); tensor = tf.clone(input);
} else { } else {
const originalWidth = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0; const originalWidth = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
const originalHeight = input.naturalHeight || input.videoHeight || input.height || input.shape && input.shape[2] > 0; const originalHeight = input.naturalHeight || input.videoHeight || input.height || input.shape && input.shape[2] > 0;
@ -20810,7 +20796,7 @@ var require_image = __commonJS((exports2) => {
outCanvas.width = inCanvas.width; outCanvas.width = inCanvas.width;
if (outCanvas.height !== inCanvas.height) if (outCanvas.height !== inCanvas.height)
outCanvas.height = inCanvas.height; outCanvas.height = inCanvas.height;
this.fx = tf3.ENV.flags.IS_BROWSER ? new fxImage.Canvas({canvas: outCanvas}) : null; this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.Canvas({canvas: outCanvas}) : null;
} }
this.fx.reset(); this.fx.reset();
this.fx.addFilter("brightness", config2.filter.brightness); this.fx.addFilter("brightness", config2.filter.brightness);
@ -20846,7 +20832,7 @@ var require_image = __commonJS((exports2) => {
} }
let pixels; let pixels;
if (config2.backend === "webgl" || outCanvas instanceof ImageData) { if (config2.backend === "webgl" || outCanvas instanceof ImageData) {
pixels = tf3.browser.fromPixels(outCanvas); pixels = tf.browser.fromPixels(outCanvas);
} else { } else {
const tempCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas"); const tempCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
tempCanvas.width = targetWidth; tempCanvas.width = targetWidth;
@ -20854,7 +20840,7 @@ var require_image = __commonJS((exports2) => {
const tempCtx = tempCanvas.getContext("2d"); const tempCtx = tempCanvas.getContext("2d");
tempCtx.drawImage(outCanvas, 0, 0); tempCtx.drawImage(outCanvas, 0, 0);
const data2 = tempCtx.getImageData(0, 0, targetWidth, targetHeight); const data2 = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
pixels = tf3.browser.fromPixels(data2); pixels = tf.browser.fromPixels(data2);
} }
const casted = pixels.toFloat(); const casted = pixels.toFloat();
tensor = casted.expandDims(0); tensor = casted.expandDims(0);
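
The tail of process() above turns whatever landed on the canvas into the batched float tensor every downstream model expects; condensed (sketch, OffscreenCanvas fallback omitted):

// canvas or ImageData -> batched float tensor [1, h, w, 3]
const pixels = tf.browser.fromPixels(outCanvas); // int32 [h, w, 3]
const casted = pixels.toFloat();
const tensor = casted.expandDims(0);             // models expect a batch dimension
pixels.dispose();
casted.dispose();
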
@ -20870,8 +20856,11 @@ var require_image = __commonJS((exports2) => {
__export(exports, { __export(exports, {
default: () => Human default: () => Human
}); });
const tf2 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
// src/tf.js
const tf = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
const dist = __toModule(require("@tensorflow/tfjs-backend-wasm/dist/index.js")); const dist = __toModule(require("@tensorflow/tfjs-backend-wasm/dist/index.js"));
const loadGraphModel = tf.loadGraphModel;
// src/face/triangulation.js // src/face/triangulation.js
var triangulation_default = [ var triangulation_default = [
@ -23525,7 +23514,6 @@ const emotion = __toModule(require_emotion());
const posenet = __toModule(require_posenet()); const posenet = __toModule(require_posenet());
// src/hand/box.js // src/hand/box.js
const tf = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
/** /**
* @license * @license
* Copyright 2020 Google LLC. All Rights Reserved. * Copyright 2020 Google LLC. All Rights Reserved.
@ -23554,16 +23542,16 @@ function getBoxCenter(box) {
box.startPoint[1] + (box.endPoint[1] - box.startPoint[1]) / 2 box.startPoint[1] + (box.endPoint[1] - box.startPoint[1]) / 2
]; ];
} }
function cutBoxFromImageAndResize(box, image3, cropSize) { function cutBoxFromImageAndResize(box, image2, cropSize) {
const h = image3.shape[1]; const h = image2.shape[1];
const w = image3.shape[2]; const w = image2.shape[2];
const boxes = [[ const boxes = [[
box.startPoint[1] / h, box.startPoint[1] / h,
box.startPoint[0] / w, box.startPoint[0] / w,
box.endPoint[1] / h, box.endPoint[1] / h,
box.endPoint[0] / w box.endPoint[0] / w
]]; ]];
return tf.image.cropAndResize(image3, boxes, [0], cropSize); return tf.image.cropAndResize(image2, boxes, [0], cropSize);
} }
function scaleBoxCoordinates(box, factor) { function scaleBoxCoordinates(box, factor) {
const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]]; const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];
@ -23684,7 +23672,7 @@ function rotatePoint(homogeneousCoordinate, rotationMatrix) {
// src/human.js // src/human.js
const handpose = __toModule(require_handpose()); const handpose = __toModule(require_handpose());
const gesture = __toModule(require_gesture()); const gesture = __toModule(require_gesture());
const image2 = __toModule(require_image()); const image = __toModule(require_image());
const profile = __toModule(require_profile()); const profile = __toModule(require_profile());
// config.js // config.js
@ -23788,7 +23776,7 @@ var config_default = {
}; };
// package.json // package.json
var version = "0.8.5"; var version = "0.8.6";
// src/human.js // src/human.js
const disableSkipFrames = { const disableSkipFrames = {
@ -23819,7 +23807,7 @@ function mergeDeep(...objects) {
} }
class Human { class Human {
constructor(userConfig = {}) { constructor(userConfig = {}) {
this.tf = tf2; this.tf = tf;
this.version = version; this.version = version;
this.config = mergeDeep(config_default, userConfig); this.config = mergeDeep(config_default, userConfig);
this.fx = null; this.fx = null;
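
mergeDeep() folds userConfig over config_default recursively, which is what lets callers pass partial configs. A compact merge consistent with that call shape (a sketch, not necessarily the bundled implementation):

function mergeDeep(...objects) {
  return objects.reduce((prev, obj) => {
    for (const key of Object.keys(obj || {})) {
      const pVal = prev[key];
      const oVal = obj[key];
      if (pVal && oVal && typeof pVal === 'object' && typeof oVal === 'object'
          && !Array.isArray(pVal) && !Array.isArray(oVal)) {
        prev[key] = mergeDeep(pVal, oVal); // plain objects merge recursively
      } else {
        prev[key] = oVal;                  // scalars and arrays are replaced
      }
    }
    return prev;
  }, {});
}
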
@ -23857,7 +23845,7 @@ class Human {
analyze(...msg) { analyze(...msg) {
if (!this.analyzeMemoryLeaks) if (!this.analyzeMemoryLeaks)
return; return;
const current = tf2.engine().state.numTensors; const current = tf.engine().state.numTensors;
const previous = this.numTensors; const previous = this.numTensors;
this.numTensors = current; this.numTensors = current;
const leaked = current - previous; const leaked = current - previous;
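
analyze() above doubles as a leak detector: tf.engine().state.numTensors is sampled at checkpoints, and any growth between two samples is tensors nobody disposed. Standalone, the check is:

let numTensors = 0;

function checkLeak(label) {
  const current = tf.engine().state.numTensors;
  const leaked = current - numTensors;
  numTensors = current;
  if (leaked !== 0) console.log(label, 'leaked tensors:', leaked);
}
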
@ -23869,11 +23857,11 @@ class Human {
return null; return null;
if (!input) if (!input)
return "input is not defined"; return "input is not defined";
if (tf2.ENV.flags.IS_NODE && !(input instanceof tf2.Tensor)) { if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {
return "input must be a tensor"; return "input must be a tensor";
} }
try { try {
tf2.getBackend(); tf.getBackend();
} catch (e) { } catch (e) {
return "backend not loaded"; return "backend not loaded";
} }
@ -23886,9 +23874,9 @@ class Human {
this.config = mergeDeep(this.config, userConfig); this.config = mergeDeep(this.config, userConfig);
if (this.firstRun) { if (this.firstRun) {
this.checkBackend(true); this.checkBackend(true);
this.log(`version: ${this.version} TensorFlow/JS version: ${tf2.version_core}`); this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);
this.log("configuration:", this.config); this.log("configuration:", this.config);
this.log("flags:", tf2.ENV.flags); this.log("flags:", tf.ENV.flags);
this.firstRun = false; this.firstRun = false;
} }
if (this.config.async) { if (this.config.async) {
@ -23900,12 +23888,12 @@ class Human {
this.models.posenet, this.models.posenet,
this.models.handpose this.models.handpose
] = await Promise.all([ ] = await Promise.all([
this.models.age || age.load(this.config), this.config.face.age.enabled ? this.models.age || age.load(this.config) : null,
this.models.gender || gender.load(this.config), this.config.face.gender.enabled ? this.models.gender || gender.load(this.config) : null,
this.models.emotion || emotion.load(this.config), this.config.face.emotion.enabled ? this.models.emotion || emotion.load(this.config) : null,
this.models.facemesh || facemesh.load(this.config.face), this.config.face.enabled ? this.models.facemesh || facemesh.load(this.config.face) : null,
this.models.posenet || posenet.load(this.config), this.config.body.enabled ? this.models.posenet || posenet.load(this.config) : null,
this.models.handpose || handpose.load(this.config.hand) this.config.hand.enabled ? this.models.handpose || handpose.load(this.config.hand) : null
]); ]);
} else { } else {
if (this.config.face.enabled && !this.models.facemesh) if (this.config.face.enabled && !this.models.facemesh)
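
This hunk appears to be the async-ops bug the commit title refers to: previously all six models were loaded unconditionally; now each Promise.all slot is gated on its enabled flag and resolves to null when the feature is off (Promise.all passes non-promise values straight through). The pattern in isolation, with two of the six slots shown:

// each slot loads only when its feature is enabled; disabled slots stay null
[this.models.age, this.models.posenet] = await Promise.all([
  this.config.face.age.enabled ? (this.models.age || age.load(this.config)) : null,
  this.config.body.enabled ? (this.models.posenet || posenet.load(this.config)) : null,
]);
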
@ -23927,26 +23915,26 @@ class Human {
} }
async checkBackend(force) { async checkBackend(force) {
const timeStamp = now(); const timeStamp = now();
if (this.config.backend && this.config.backend !== "" && force || tf2.getBackend() !== this.config.backend) { if (this.config.backend && this.config.backend !== "" && force || tf.getBackend() !== this.config.backend) {
this.state = "backend"; this.state = "backend";
this.log("setting backend:", this.config.backend); this.log("setting backend:", this.config.backend);
if (this.config.backend === "wasm") { if (this.config.backend === "wasm") {
this.log("settings wasm path:", this.config.wasmPath); this.log("settings wasm path:", this.config.wasmPath);
dist.setWasmPaths(this.config.wasmPath); dist.setWasmPaths(this.config.wasmPath);
const simd = await tf2.env().getAsync("WASM_HAS_SIMD_SUPPORT"); const simd = await tf.env().getAsync("WASM_HAS_SIMD_SUPPORT");
if (!simd) if (!simd)
this.log("warning: wasm simd support is not enabled"); this.log("warning: wasm simd support is not enabled");
} }
await tf2.setBackend(this.config.backend); await tf.setBackend(this.config.backend);
tf2.enableProdMode(); tf.enableProdMode();
if (this.config.backend === "webgl") { if (this.config.backend === "webgl") {
if (this.config.deallocate) { if (this.config.deallocate) {
this.log("changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:", this.config.deallocate); this.log("changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:", this.config.deallocate);
tf2.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", this.config.deallocate ? 0 : -1); tf.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", this.config.deallocate ? 0 : -1);
} }
tf2.ENV.set("WEBGL_PACK_DEPTHWISECONV", true); tf.ENV.set("WEBGL_PACK_DEPTHWISECONV", true);
} }
await tf2.ready(); await tf.ready();
} }
const current = Math.trunc(now() - timeStamp); const current = Math.trunc(now() - timeStamp);
if (current > (this.perf.backend || 0)) if (current > (this.perf.backend || 0))
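
checkBackend() above centralizes backend switching; stripped to its outline (a sketch, using the setWasmPaths re-export from src/tf.js and the flag values this bundle sets):

import { tf, setWasmPaths } from './tf.js';

async function switchBackend(name, wasmPath) {
  if (name === 'wasm') setWasmPaths(wasmPath); // where the .wasm binaries are served from
  await tf.setBackend(name);
  tf.enableProdMode();                         // skip debug-mode sanity checks
  if (name === 'webgl') tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
  await tf.ready();
}
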
@ -24048,10 +24036,10 @@ class Human {
await this.checkBackend(); await this.checkBackend();
await this.load(); await this.load();
if (this.config.scoped) if (this.config.scoped)
tf2.engine().startScope(); tf.engine().startScope();
this.analyze("Start Scope:"); this.analyze("Start Scope:");
timeStamp = now(); timeStamp = now();
const process3 = image2.process(input, this.config); const process3 = image.process(input, this.config);
this.perf.image = Math.trunc(now() - timeStamp); this.perf.image = Math.trunc(now() - timeStamp);
this.analyze("Get Image:"); this.analyze("Get Image:");
if (this.config.async) { if (this.config.async) {
@ -24092,7 +24080,7 @@ class Human {
} }
process3.tensor.dispose(); process3.tensor.dispose();
if (this.config.scoped) if (this.config.scoped)
tf2.engine().endScope(); tf.engine().endScope();
this.analyze("End Scope:"); this.analyze("End Scope:");
let gestureRes = []; let gestureRes = [];
if (this.config.gesture.enabled) { if (this.config.gesture.enabled) {

File diff suppressed because one or more lines are too long

dist/human.node.js vendored

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

dist/human.node.json vendored

@ -9,8 +9,11 @@
"imports": [] "imports": []
}, },
"src/age/age.js": { "src/age/age.js": {
"bytes": 1926, "bytes": 1912,
"imports": [ "imports": [
{
"path": "src/tf.js"
},
{ {
"path": "src/profile.js" "path": "src/profile.js"
} }
@ -58,20 +61,30 @@
"imports": [] "imports": []
}, },
"src/body/modelBase.js": { "src/body/modelBase.js": {
"bytes": 901, "bytes": 874,
"imports": [] "imports": [
{
"path": "src/tf.js"
}
]
}, },
"src/body/modelMobileNet.js": { "src/body/modelMobileNet.js": {
"bytes": 611, "bytes": 584,
"imports": [ "imports": [
{
"path": "src/tf.js"
},
{ {
"path": "src/body/modelBase.js" "path": "src/body/modelBase.js"
} }
] ]
}, },
"src/body/modelPoseNet.js": { "src/body/modelPoseNet.js": {
"bytes": 1940, "bytes": 1922,
"imports": [ "imports": [
{
"path": "src/tf.js"
},
{ {
"path": "src/body/modelMobileNet.js" "path": "src/body/modelMobileNet.js"
}, },
@ -120,24 +133,38 @@
] ]
}, },
"src/emotion/emotion.js": { "src/emotion/emotion.js": {
"bytes": 2942, "bytes": 2928,
"imports": [ "imports": [
{
"path": "src/tf.js"
},
{ {
"path": "src/profile.js" "path": "src/profile.js"
} }
] ]
}, },
"src/face/blazeface.js": { "src/face/blazeface.js": {
"bytes": 6955, "bytes": 6941,
"imports": [] "imports": [
{
"path": "src/tf.js"
}
]
}, },
"src/face/box.js": { "src/face/box.js": {
"bytes": 1947, "bytes": 1920,
"imports": [] "imports": [
{
"path": "src/tf.js"
}
]
}, },
"src/face/facemesh.js": { "src/face/facemesh.js": {
"bytes": 2621, "bytes": 2604,
"imports": [ "imports": [
{
"path": "src/tf.js"
},
{ {
"path": "src/face/blazeface.js" "path": "src/face/blazeface.js"
}, },
@ -156,8 +183,11 @@
] ]
}, },
"src/face/facepipeline.js": { "src/face/facepipeline.js": {
"bytes": 13713, "bytes": 13686,
"imports": [ "imports": [
{
"path": "src/tf.js"
},
{ {
"path": "src/face/box.js" "path": "src/face/box.js"
}, },
@ -186,8 +216,11 @@
"imports": [] "imports": []
}, },
"src/gender/gender.js": { "src/gender/gender.js": {
"bytes": 3207, "bytes": 3193,
"imports": [ "imports": [
{
"path": "src/tf.js"
},
{ {
"path": "src/profile.js" "path": "src/profile.js"
} }
@ -202,20 +235,30 @@
"imports": [] "imports": []
}, },
"src/hand/box.js": { "src/hand/box.js": {
"bytes": 3238, "bytes": 3211,
"imports": [] "imports": [
{
"path": "src/tf.js"
}
]
}, },
"src/hand/handdetector.js": { "src/hand/handdetector.js": {
"bytes": 4248, "bytes": 4221,
"imports": [ "imports": [
{
"path": "src/tf.js"
},
{ {
"path": "src/hand/box.js" "path": "src/hand/box.js"
} }
] ]
}, },
"src/hand/handpipeline.js": { "src/hand/handpipeline.js": {
"bytes": 7559, "bytes": 7532,
"imports": [ "imports": [
{
"path": "src/tf.js"
},
{ {
"path": "src/hand/box.js" "path": "src/hand/box.js"
}, },
@ -225,8 +268,11 @@
] ]
}, },
"src/hand/handpose.js": { "src/hand/handpose.js": {
"bytes": 3047, "bytes": 3026,
"imports": [ "imports": [
{
"path": "src/tf.js"
},
{ {
"path": "src/hand/handdetector.js" "path": "src/hand/handdetector.js"
}, },
@ -243,8 +289,11 @@
"imports": [] "imports": []
}, },
"src/human.js": { "src/human.js": {
"bytes": 14315, "bytes": 14448,
"imports": [ "imports": [
{
"path": "src/tf.js"
},
{ {
"path": "src/face/facemesh.js" "path": "src/face/facemesh.js"
}, },
@ -281,8 +330,11 @@
] ]
}, },
"src/image.js": { "src/image.js": {
"bytes": 4666, "bytes": 4638,
"imports": [ "imports": [
{
"path": "src/tf.js"
},
{ {
"path": "src/imagefx.js" "path": "src/imagefx.js"
} }
@ -295,55 +347,59 @@
"src/profile.js": { "src/profile.js": {
"bytes": 1061, "bytes": 1061,
"imports": [] "imports": []
},
"src/tf.js": {
"bytes": 871,
"imports": []
} }
}, },
"outputs": { "outputs": {
"dist/human.node-nobundle.js.map": { "dist/human.node-nobundle.js.map": {
"imports": [], "imports": [],
"inputs": {}, "inputs": {},
"bytes": 794759 "bytes": 781095
}, },
"dist/human.node-nobundle.js": { "dist/human.node-nobundle.js": {
"imports": [], "imports": [],
"inputs": { "inputs": {
"src/face/blazeface.js": { "src/face/blazeface.js": {
"bytesInOutput": 7083 "bytesInOutput": 6974
}, },
"src/face/keypoints.js": { "src/face/keypoints.js": {
"bytesInOutput": 2771 "bytesInOutput": 2771
}, },
"src/face/box.js": { "src/face/box.js": {
"bytesInOutput": 2133 "bytesInOutput": 2059
}, },
"src/face/util.js": { "src/face/util.js": {
"bytesInOutput": 3054 "bytesInOutput": 3054
}, },
"src/face/facepipeline.js": { "src/face/facepipeline.js": {
"bytesInOutput": 12462 "bytesInOutput": 12381
}, },
"src/face/uvcoords.js": { "src/face/uvcoords.js": {
"bytesInOutput": 20586 "bytesInOutput": 20586
}, },
"src/face/facemesh.js": { "src/face/facemesh.js": {
"bytesInOutput": 2651 "bytesInOutput": 2569
}, },
"src/profile.js": { "src/profile.js": {
"bytesInOutput": 1120 "bytesInOutput": 1120
}, },
"src/age/age.js": { "src/age/age.js": {
"bytesInOutput": 1910 "bytesInOutput": 1829
}, },
"src/gender/gender.js": { "src/gender/gender.js": {
"bytesInOutput": 3070 "bytesInOutput": 2983
}, },
"src/emotion/emotion.js": { "src/emotion/emotion.js": {
"bytesInOutput": 2786 "bytesInOutput": 2700
}, },
"src/body/modelBase.js": { "src/body/modelBase.js": {
"bytesInOutput": 942 "bytesInOutput": 868
}, },
"src/body/modelMobileNet.js": { "src/body/modelMobileNet.js": {
"bytesInOutput": 548 "bytesInOutput": 473
}, },
"src/body/heapSort.js": { "src/body/heapSort.js": {
"bytesInOutput": 1639 "bytesInOutput": 1639
@ -367,22 +423,22 @@
"bytesInOutput": 2418 "bytesInOutput": 2418
}, },
"src/body/modelPoseNet.js": { "src/body/modelPoseNet.js": {
"bytesInOutput": 2092 "bytesInOutput": 2015
}, },
"src/body/posenet.js": { "src/body/posenet.js": {
"bytesInOutput": 977 "bytesInOutput": 977
}, },
"src/hand/handdetector.js": { "src/hand/handdetector.js": {
"bytesInOutput": 4504 "bytesInOutput": 4404
}, },
"src/hand/handpipeline.js": { "src/hand/handpipeline.js": {
"bytesInOutput": 7242 "bytesInOutput": 7167
}, },
"src/hand/anchors.js": { "src/hand/anchors.js": {
"bytesInOutput": 256592 "bytesInOutput": 256592
}, },
"src/hand/handpose.js": { "src/hand/handpose.js": {
"bytesInOutput": 3148 "bytesInOutput": 3067
}, },
"src/gesture.js": { "src/gesture.js": {
"bytesInOutput": 3042 "bytesInOutput": 3042
@ -391,13 +447,13 @@
"bytesInOutput": 20099 "bytesInOutput": 20099
}, },
"src/image.js": { "src/image.js": {
"bytesInOutput": 4563 "bytesInOutput": 4485
}, },
"src/human.js": { "src/human.js": {
"bytesInOutput": 47 "bytesInOutput": 47
}, },
"src/human.js": { "src/tf.js": {
"bytesInOutput": 152 "bytesInOutput": 193
}, },
"src/face/triangulation.js": { "src/face/triangulation.js": {
"bytesInOutput": 17898 "bytesInOutput": 17898
@ -406,13 +462,13 @@
"bytesInOutput": 227 "bytesInOutput": 227
}, },
"src/hand/box.js": { "src/hand/box.js": {
"bytesInOutput": 3107 "bytesInOutput": 3037
}, },
"src/hand/util.js": { "src/hand/util.js": {
"bytesInOutput": 2884 "bytesInOutput": 2884
}, },
"src/human.js": { "src/human.js": {
"bytesInOutput": 187 "bytesInOutput": 186
}, },
"config.js": { "config.js": {
"bytesInOutput": 1933 "bytesInOutput": 1933
@ -424,10 +480,10 @@
"bytesInOutput": 23 "bytesInOutput": 23
}, },
"src/human.js": { "src/human.js": {
"bytesInOutput": 11797 "bytesInOutput": 12003
} }
}, },
"bytes": 419228 "bytes": 418258
} }
} }
} }


@ -1,4 +1,4 @@
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf, loadGraphModel } from '../tf.js';
import * as profile from '../profile.js'; import * as profile from '../profile.js';
const models = {}; const models = {};
@ -10,7 +10,7 @@ const zoom = [0, 0]; // 0..1 meaning 0%..100%
async function load(config) { async function load(config) {
if (!models.age) { if (!models.age) {
models.age = await tf.loadGraphModel(config.face.age.modelPath); models.age = await loadGraphModel(config.face.age.modelPath);
// eslint-disable-next-line no-console // eslint-disable-next-line no-console
console.log(`Human: load model: ${config.face.age.modelPath.match(/\/(.*)\./)[1]}`); console.log(`Human: load model: ${config.face.age.modelPath.match(/\/(.*)\./)[1]}`);
} }
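
Each model module keeps its graph model in a module-level cache, so repeated load() calls are no-ops; combined with the new shared import the pattern is (sketch):

import { loadGraphModel } from '../tf.js';

const models = {};

async function load(config) {
  // loads once; subsequent calls reuse the cached graph model
  if (!models.age) models.age = await loadGraphModel(config.face.age.modelPath);
  return models.age;
}
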


@ -1,4 +1,4 @@
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf } from '../tf.js';
import * as kpt from './keypoints'; import * as kpt from './keypoints';
function getPointsConfidence(heatmapScores, heatMapCoords) { function getPointsConfidence(heatmapScores, heatMapCoords) {


@ -1,4 +1,4 @@
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf } from '../tf.js';
class BaseModel { class BaseModel {
constructor(model, outputStride) { constructor(model, outputStride) {


@ -1,4 +1,4 @@
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf } from '../tf.js';
import * as modelBase from './modelBase'; import * as modelBase from './modelBase';
class MobileNet extends modelBase.BaseModel { class MobileNet extends modelBase.BaseModel {


@ -1,4 +1,4 @@
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { loadGraphModel } from '../tf.js';
import * as modelMobileNet from './modelMobileNet'; import * as modelMobileNet from './modelMobileNet';
import * as decodeMultiple from './decodeMultiple'; import * as decodeMultiple from './decodeMultiple';
import * as util from './util'; import * as util from './util';
@ -38,7 +38,7 @@ class PoseNet {
exports.PoseNet = PoseNet; exports.PoseNet = PoseNet;
async function load(config) { async function load(config) {
const graphModel = await tf.loadGraphModel(config.body.modelPath); const graphModel = await loadGraphModel(config.body.modelPath);
const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride); const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride);
// eslint-disable-next-line no-console // eslint-disable-next-line no-console
console.log(`Human: load model: ${config.body.modelPath.match(/\/(.*)\./)[1]}`); console.log(`Human: load model: ${config.body.modelPath.match(/\/(.*)\./)[1]}`);


@ -1,4 +1,4 @@
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf, loadGraphModel } from '../tf.js';
import * as profile from '../profile.js'; import * as profile from '../profile.js';
const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surpise', 'neutral']; const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surpise', 'neutral'];
@ -13,7 +13,7 @@ const scale = 1; // score multiplication factor
async function load(config) { async function load(config) {
if (!models.emotion) { if (!models.emotion) {
models.emotion = await tf.loadGraphModel(config.face.emotion.modelPath); models.emotion = await loadGraphModel(config.face.emotion.modelPath);
// eslint-disable-next-line no-console // eslint-disable-next-line no-console
console.log(`Human: load model: ${config.face.emotion.modelPath.match(/\/(.*)\./)[1]}`); console.log(`Human: load model: ${config.face.emotion.modelPath.match(/\/(.*)\./)[1]}`);
} }


@ -1,4 +1,4 @@
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf, loadGraphModel } from '../tf.js';
const NUM_LANDMARKS = 6; const NUM_LANDMARKS = 6;
@ -164,7 +164,7 @@ class BlazeFaceModel {
} }
async function load(config) { async function load(config) {
const blazeface = await tf.loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') }); const blazeface = await loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') });
const model = new BlazeFaceModel(blazeface, config); const model = new BlazeFaceModel(blazeface, config);
// eslint-disable-next-line no-console // eslint-disable-next-line no-console
console.log(`Human: load model: ${config.detector.modelPath.match(/\/(.*)\./)[1]}`); console.log(`Human: load model: ${config.detector.modelPath.match(/\/(.*)\./)[1]}`);


@ -1,4 +1,4 @@
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf } from '../tf.js';
function scaleBoxCoordinates(box, factor) { function scaleBoxCoordinates(box, factor) {
const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]]; const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];


@ -1,4 +1,4 @@
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf, loadGraphModel } from '../tf.js';
import * as blazeface from './blazeface'; import * as blazeface from './blazeface';
import * as keypoints from './keypoints'; import * as keypoints from './keypoints';
import * as pipe from './facepipeline'; import * as pipe from './facepipeline';
@ -44,8 +44,8 @@ class MediaPipeFaceMesh {
async function load(config) { async function load(config) {
const models = await Promise.all([ const models = await Promise.all([
blazeface.load(config), blazeface.load(config),
tf.loadGraphModel(config.mesh.modelPath, { fromTFHub: config.mesh.modelPath.includes('tfhub.dev') }), loadGraphModel(config.mesh.modelPath, { fromTFHub: config.mesh.modelPath.includes('tfhub.dev') }),
tf.loadGraphModel(config.iris.modelPath, { fromTFHub: config.iris.modelPath.includes('tfhub.dev') }), loadGraphModel(config.iris.modelPath, { fromTFHub: config.iris.modelPath.includes('tfhub.dev') }),
]); ]);
const faceMesh = new MediaPipeFaceMesh(models[0], models[1], models[2], config); const faceMesh = new MediaPipeFaceMesh(models[0], models[1], models[2], config);
// eslint-disable-next-line no-console // eslint-disable-next-line no-console


@ -1,5 +1,5 @@
/* eslint-disable class-methods-use-this */ /* eslint-disable class-methods-use-this */
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf } from '../tf.js';
import * as bounding from './box'; import * as bounding from './box';
import * as keypoints from './keypoints'; import * as keypoints from './keypoints';
import * as util from './util'; import * as util from './util';


@ -1,4 +1,4 @@
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf, loadGraphModel } from '../tf.js';
import * as profile from '../profile.js'; import * as profile from '../profile.js';
const models = {}; const models = {};
@ -12,7 +12,7 @@ const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when
async function load(config) { async function load(config) {
if (!models.gender) { if (!models.gender) {
models.gender = await tf.loadGraphModel(config.face.gender.modelPath); models.gender = await loadGraphModel(config.face.gender.modelPath);
alternative = models.gender.inputs[0].shape[3] === 1; alternative = models.gender.inputs[0].shape[3] === 1;
// eslint-disable-next-line no-console // eslint-disable-next-line no-console
console.log(`Human: load model: ${config.face.gender.modelPath.match(/\/(.*)\./)[1]}`); console.log(`Human: load model: ${config.face.gender.modelPath.match(/\/(.*)\./)[1]}`);


@ -14,7 +14,7 @@
* limitations under the License. * limitations under the License.
* ============================================================================= * =============================================================================
*/ */
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf } from '../tf.js';
function getBoxSize(box) { function getBoxSize(box) {
return [ return [


@ -15,7 +15,7 @@
* ============================================================================= * =============================================================================
*/ */
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf } from '../tf.js';
import * as box from './box'; import * as box from './box';
class HandDetector { class HandDetector {


@ -15,7 +15,7 @@
* ============================================================================= * =============================================================================
*/ */
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf } from '../tf.js';
import * as box from './box'; import * as box from './box';
import * as util from './util'; import * as util from './util';


@ -16,7 +16,7 @@
*/ */
// https://storage.googleapis.com/tfjs-models/demos/handpose/index.html // https://storage.googleapis.com/tfjs-models/demos/handpose/index.html
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { loadGraphModel } from '../tf.js';
import * as handdetector from './handdetector'; import * as handdetector from './handdetector';
import * as pipeline from './handpipeline'; import * as pipeline from './handpipeline';
import * as anchors from './anchors'; import * as anchors from './anchors';
@ -69,8 +69,8 @@ exports.HandPose = HandPose;
async function load(config) { async function load(config) {
const [handDetectorModel, handPoseModel] = await Promise.all([ const [handDetectorModel, handPoseModel] = await Promise.all([
tf.loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') }), loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') }),
tf.loadGraphModel(config.skeleton.modelPath, { fromTFHub: config.skeleton.modelPath.includes('tfhub.dev') }), loadGraphModel(config.skeleton.modelPath, { fromTFHub: config.skeleton.modelPath.includes('tfhub.dev') }),
]); ]);
const detector = new handdetector.HandDetector(handDetectorModel, config.inputSize, anchors.anchors); const detector = new handdetector.HandDetector(handDetectorModel, config.inputSize, anchors.anchors);
const pipe = new pipeline.HandPipeline(detector, handPoseModel, config.inputSize); const pipe = new pipeline.HandPipeline(detector, handPoseModel, config.inputSize);


@ -1,5 +1,4 @@
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf, setWasmPaths } from './tf.js';
import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';
import * as facemesh from './face/facemesh.js'; import * as facemesh from './face/facemesh.js';
import * as age from './age/age.js'; import * as age from './age/age.js';
import * as gender from './gender/gender.js'; import * as gender from './gender/gender.js';
@ -131,12 +130,12 @@ class Human {
this.models.posenet, this.models.posenet,
this.models.handpose, this.models.handpose,
] = await Promise.all([ ] = await Promise.all([
this.models.age || age.load(this.config), this.config.face.age.enabled ? this.models.age || age.load(this.config) : null,
this.models.gender || gender.load(this.config), this.config.face.gender.enabled ? this.models.gender || gender.load(this.config) : null,
this.models.emotion || emotion.load(this.config), this.config.face.emotion.enabled ? this.models.emotion || emotion.load(this.config) : null,
this.models.facemesh || facemesh.load(this.config.face), this.config.face.enabled ? this.models.facemesh || facemesh.load(this.config.face) : null,
this.models.posenet || posenet.load(this.config), this.config.body.enabled ? this.models.posenet || posenet.load(this.config) : null,
this.models.handpose || handpose.load(this.config.hand), this.config.hand.enabled ? this.models.handpose || handpose.load(this.config.hand) : null,
]); ]);
} else { } else {
if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config.face); if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config.face);


@ -1,4 +1,4 @@
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js'; import { tf } from './tf.js';
import * as fxImage from './imagefx.js'; import * as fxImage from './imagefx.js';
// internal temp canvases // internal temp canvases

src/tf.js Normal file

@ -0,0 +1,19 @@
// monolithic: bundle 3.4M
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js';
import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';
const loadGraphModel = tf.loadGraphModel;
export { tf, setWasmPaths, loadGraphModel };
// modular: bundle 4.2M
/*
import * as tf from '@tensorflow/tfjs-core/dist/tf-core.es2017.js';
import { loadGraphModel } from '@tensorflow/tfjs-converter/dist/tf-converter.es2017.js';
import * as tfCPU from '@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.es2017.js';
import * as tfWebGL from '@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.es2017.js';
import { setWasmPaths, version_wasm } from '@tensorflow/tfjs-backend-wasm/dist/index.js';
const version = { core: tf.version, cpu: tfCPU.version_cpu, webgl: tfWebGL.version_webgl, wasm: version_wasm };
export { tf, setWasmPaths, loadGraphModel, version };
*/
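
With src/tf.js in place every module imports TensorFlow through one choke point, which is what makes the monolithic/modular swap above a one-file change. A typical consumer (illustrative model path):

// any module under src/ now pulls TensorFlow the same way:
import { tf, loadGraphModel } from '../tf.js';

async function example() {
  const model = await loadGraphModel('../models/blazeface-back.json'); // illustrative path
  const zeros = tf.zeros([1, 128, 128, 3]);
  const result = model.predict(zeros);
  zeros.dispose();
  return result;
}

Per the size notes in the comments, the monolithic import yields a ~3.4M bundle versus ~4.2M for the modular variant, which explains why the monolithic path is the active one.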