Mirror of https://github.com/vladmandic/human

Commit 0ee6b8b00c: simplify face box coordinate calculations
Parent: ac5e696cc1
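This commit renames BlazeFaceModel.blazeFaceModel to model, replaces the separate width/height tensors with a single numeric inputSize, crops the face tensor to meshSize only when the mesh model is enabled (falling back to boxSize otherwise), scales raw mesh coordinates directly by boxSize / meshSize instead of a precomputed scaleFactor array, and switches the demo config to the wasm backend with mesh enabled. A minimal sketch of the simplified coordinate transform, lifted from the transformRawCoords change below; the standalone function and its parameter names are illustrative, not part of the library API:

// Sketch only: the simplified scaling used by transformRawCoords in this commit.
// boxSize is the detected face box [width, height]; meshSize is the mesh model input size.
function scaleRawCoords(rawCoords: [number, number, number][], boxSize: [number, number], meshSize: number) {
  return rawCoords.map((coord) => [
    (boxSize[0] / meshSize) * (coord[0] - meshSize / 2), // x: scale relative to the crop center
    (boxSize[1] / meshSize) * (coord[1] - meshSize / 2), // y: same per-axis factor, no separate scaleFactor array
    coord[2], // z passes through unchanged
  ]);
}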
@@ -6,11 +6,11 @@ import GLBench from './gl-bench.js';
 // const userConfig = { backend: 'webgl' }; // add any user configuration overrides

 const userConfig = {
-backend: 'webgl',
+backend: 'wasm',
 async: false,
 warmup: 'face',
 videoOptimized: false,
-face: { enabled: true, mesh: { enabled: false }, iris: { enabled: false }, age: { enabled: false }, gender: { enabled: false }, emotion: { enabled: false }, embedding: { enabled: true } },
+face: { enabled: true, mesh: { enabled: true }, iris: { enabled: false }, age: { enabled: false }, gender: { enabled: false }, emotion: { enabled: false }, embedding: { enabled: true } },
 hand: { enabled: false },
 gesture: { enabled: false },
 body: { enabled: false, modelPath: '../models/blazepose.json' },
@@ -38,7 +38,7 @@ const ui = {
 detectFPS: [], // internal, holds fps values for detection performance
 drawFPS: [], // internal, holds fps values for draw performance
 buffered: false, // experimental, should output be buffered between frames
-drawWarmup: true, // debug only, should warmup image processing be displayed on startup
+drawWarmup: false, // debug only, should warmup image processing be displayed on startup
 drawThread: null, // internl, perform draw operations in a separate thread
 detectThread: null, // internl, perform detect operations in a separate thread
 framesDraw: 0, // internal, statistics on frames drawn
@@ -75502,22 +75502,19 @@ function decodeBounds(boxOutputs, anchors3, inputSize) {
 }
 var BlazeFaceModel = class {
 constructor(model7, config3) {
-this.blazeFaceModel = model7;
+this.model = model7;
-this.width = model7.inputs[0].shape[2];
-this.height = model7.inputs[0].shape[1];
 this.anchorsData = generateAnchors(model7.inputs[0].shape[1]);
 this.anchors = tensor2d(this.anchorsData);
-this.inputSize = tensor1d([this.width, this.height]);
+this.inputSize = model7.inputs[0].shape[2];
 this.config = config3;
-this.scaleFaces = 0.8;
 }
 async getBoundingBoxes(inputImage) {
 if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
 return null;
 const [batch, boxes, scores] = tidy(() => {
-const resizedImage = inputImage.resizeBilinear([this.width, this.height]);
+const resizedImage = inputImage.resizeBilinear([this.inputSize, this.inputSize]);
 const normalizedImage = resizedImage.div(127.5).sub(0.5);
-const batchedPrediction = this.blazeFaceModel.predict(normalizedImage);
+const batchedPrediction = this.model.predict(normalizedImage);
 let batchOut;
 if (Array.isArray(batchedPrediction)) {
 const sorted = batchedPrediction.sort((a, b) => a.size - b.size);
@@ -75528,7 +75525,7 @@ var BlazeFaceModel = class {
 } else {
 batchOut = batchedPrediction.squeeze();
 }
-const boxesOut = decodeBounds(batchOut, this.anchors, this.inputSize);
+const boxesOut = decodeBounds(batchOut, this.anchors, [this.inputSize, this.inputSize]);
 const logits = slice(batchOut, [0, 0], [-1, 1]);
 const scoresOut = sigmoid(logits).squeeze();
 return [batchOut, boxesOut, scoresOut];
@@ -75559,7 +75556,7 @@ var BlazeFaceModel = class {
 scores.dispose();
 return {
 boxes: annotatedBoxes,
-scaleFactor: [inputImage.shape[2] / this.width, inputImage.shape[1] / this.height]
+scaleFactor: [inputImage.shape[2] / this.inputSize, inputImage.shape[1] / this.inputSize]
 };
 }
 };
@@ -78991,8 +78988,8 @@ var Pipeline = class {
 this.boundingBoxDetector = boundingBoxDetector;
 this.meshDetector = meshDetector;
 this.irisModel = irisModel;
-this.boxSize = ((_a = boundingBoxDetector == null ? void 0 : boundingBoxDetector.blazeFaceModel) == null ? void 0 : _a.inputs[0].shape[2]) || 0;
+this.boxSize = ((_a = boundingBoxDetector == null ? void 0 : boundingBoxDetector.model) == null ? void 0 : _a.inputs[0].shape[2]) || 0;
-this.meshSize = (meshDetector == null ? void 0 : meshDetector.inputs[0].shape[2]) || ((_b = boundingBoxDetector == null ? void 0 : boundingBoxDetector.blazeFaceModel) == null ? void 0 : _b.inputs[0].shape[2]);
+this.meshSize = (meshDetector == null ? void 0 : meshDetector.inputs[0].shape[2]) || ((_b = boundingBoxDetector == null ? void 0 : boundingBoxDetector.model) == null ? void 0 : _b.inputs[0].shape[2]);
 this.irisSize = (irisModel == null ? void 0 : irisModel.inputs[0].shape[1]) || 0;
 this.irisEnlarge = 2.3;
 this.skipped = 0;
@@ -79000,10 +78997,9 @@ var Pipeline = class {
 }
 transformRawCoords(rawCoords, box3, angle, rotationMatrix) {
 const boxSize = getBoxSize({startPoint: box3.startPoint, endPoint: box3.endPoint});
-const scaleFactor = [boxSize[0] / this.meshSize, boxSize[1] / this.boxSize];
 const coordsScaled = rawCoords.map((coord) => [
-scaleFactor[0] * (coord[0] - this.boxSize / 2),
+boxSize[0] / this.meshSize * (coord[0] - this.meshSize / 2),
-scaleFactor[1] * (coord[1] - this.boxSize / 2),
+boxSize[1] / this.meshSize * (coord[1] - this.meshSize / 2),
 coord[2]
 ]);
 const coordsRotationMatrix = angle !== 0 ? buildRotationMatrix(angle, [0, 0]) : IDENTITY_MATRIX;
@@ -79116,11 +79112,17 @@ var Pipeline = class {
 const faceCenterNormalized = [faceCenter[0] / input2.shape[2], faceCenter[1] / input2.shape[1]];
 const rotatedImage = image.rotateWithOffset(input2, angle, 0, faceCenterNormalized);
 rotationMatrix = buildRotationMatrix(-angle, faceCenter);
+if (config3.face.mesh.enabled)
 face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, rotatedImage, [this.meshSize, this.meshSize]).div(255);
+else
+face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, rotatedImage, [this.boxSize, this.boxSize]).div(255);
 } else {
 rotationMatrix = IDENTITY_MATRIX;
-const cloned = input2.clone();
+const clonedImage = input2.clone();
-face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, cloned, [this.boxSize, this.boxSize]).div(255);
+if (config3.face.mesh.enabled)
+face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, clonedImage, [this.meshSize, this.meshSize]).div(255);
+else
+face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, clonedImage, [this.boxSize, this.boxSize]).div(255);
 }
 if (!config3.face.mesh.enabled) {
 const prediction2 = {
@@ -79162,7 +79164,7 @@ var Pipeline = class {
 rawCoords = rawCoords.concat(adjustedLeftIrisCoords).concat(adjustedRightIrisCoords);
 }
 const transformedCoordsData = this.transformRawCoords(rawCoords, box3, angle, rotationMatrix);
-const landmarksBox = enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));
+const landmarksBox = enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData), 1.5);
 const squarifiedLandmarksBox = squarifyBox(landmarksBox);
 const transformedCoords = tensor2d(transformedCoordsData);
 const prediction = {
@@ -79465,7 +79467,6 @@ async function predict4(image3, config3) {
 run("emotion", profileData);
 }
 }
-resize.dispose();
 resolve(data2);
 });
 }
@@ -101345,11 +101346,11 @@ var gl_bench_default = GLBench;

 // demo/browser.js
 var userConfig = {
-backend: "webgl",
+backend: "wasm",
 async: false,
 warmup: "face",
 videoOptimized: false,
-face: {enabled: true, mesh: {enabled: false}, iris: {enabled: false}, age: {enabled: false}, gender: {enabled: false}, emotion: {enabled: false}, embedding: {enabled: true}},
+face: {enabled: true, mesh: {enabled: true}, iris: {enabled: false}, age: {enabled: false}, gender: {enabled: false}, emotion: {enabled: false}, embedding: {enabled: true}},
 hand: {enabled: false},
 gesture: {enabled: false},
 body: {enabled: false, modelPath: "../models/blazepose.json"}
@@ -101374,7 +101375,7 @@ var ui = {
 detectFPS: [],
 drawFPS: [],
 buffered: false,
-drawWarmup: true,
+drawWarmup: false,
 drawThread: null,
 detectThread: null,
 framesDraw: 0,
File diff suppressed because one or more lines are too long
@@ -406,22 +406,19 @@ function decodeBounds(boxOutputs, anchors3, inputSize) {
 }
 var BlazeFaceModel = class {
 constructor(model6, config3) {
-this.blazeFaceModel = model6;
+this.model = model6;
-this.width = model6.inputs[0].shape[2];
-this.height = model6.inputs[0].shape[1];
 this.anchorsData = generateAnchors(model6.inputs[0].shape[1]);
 this.anchors = tf2.tensor2d(this.anchorsData);
-this.inputSize = tf2.tensor1d([this.width, this.height]);
+this.inputSize = model6.inputs[0].shape[2];
 this.config = config3;
-this.scaleFaces = 0.8;
 }
 async getBoundingBoxes(inputImage) {
 if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
 return null;
 const [batch, boxes, scores] = tf2.tidy(() => {
-const resizedImage = inputImage.resizeBilinear([this.width, this.height]);
+const resizedImage = inputImage.resizeBilinear([this.inputSize, this.inputSize]);
 const normalizedImage = resizedImage.div(127.5).sub(0.5);
-const batchedPrediction = this.blazeFaceModel.predict(normalizedImage);
+const batchedPrediction = this.model.predict(normalizedImage);
 let batchOut;
 if (Array.isArray(batchedPrediction)) {
 const sorted = batchedPrediction.sort((a, b) => a.size - b.size);
@@ -432,7 +429,7 @@ var BlazeFaceModel = class {
 } else {
 batchOut = batchedPrediction.squeeze();
 }
-const boxesOut = decodeBounds(batchOut, this.anchors, this.inputSize);
+const boxesOut = decodeBounds(batchOut, this.anchors, [this.inputSize, this.inputSize]);
 const logits = tf2.slice(batchOut, [0, 0], [-1, 1]);
 const scoresOut = tf2.sigmoid(logits).squeeze();
 return [batchOut, boxesOut, scoresOut];
@@ -463,7 +460,7 @@ var BlazeFaceModel = class {
 scores.dispose();
 return {
 boxes: annotatedBoxes,
-scaleFactor: [inputImage.shape[2] / this.width, inputImage.shape[1] / this.height]
+scaleFactor: [inputImage.shape[2] / this.inputSize, inputImage.shape[1] / this.inputSize]
 };
 }
 };
@@ -3899,8 +3896,8 @@ var Pipeline = class {
 this.boundingBoxDetector = boundingBoxDetector;
 this.meshDetector = meshDetector;
 this.irisModel = irisModel;
-this.boxSize = ((_a = boundingBoxDetector == null ? void 0 : boundingBoxDetector.blazeFaceModel) == null ? void 0 : _a.inputs[0].shape[2]) || 0;
+this.boxSize = ((_a = boundingBoxDetector == null ? void 0 : boundingBoxDetector.model) == null ? void 0 : _a.inputs[0].shape[2]) || 0;
-this.meshSize = (meshDetector == null ? void 0 : meshDetector.inputs[0].shape[2]) || ((_b = boundingBoxDetector == null ? void 0 : boundingBoxDetector.blazeFaceModel) == null ? void 0 : _b.inputs[0].shape[2]);
+this.meshSize = (meshDetector == null ? void 0 : meshDetector.inputs[0].shape[2]) || ((_b = boundingBoxDetector == null ? void 0 : boundingBoxDetector.model) == null ? void 0 : _b.inputs[0].shape[2]);
 this.irisSize = (irisModel == null ? void 0 : irisModel.inputs[0].shape[1]) || 0;
 this.irisEnlarge = 2.3;
 this.skipped = 0;
@@ -3908,10 +3905,9 @@ var Pipeline = class {
 }
 transformRawCoords(rawCoords, box3, angle, rotationMatrix) {
 const boxSize = getBoxSize({startPoint: box3.startPoint, endPoint: box3.endPoint});
-const scaleFactor = [boxSize[0] / this.meshSize, boxSize[1] / this.boxSize];
 const coordsScaled = rawCoords.map((coord) => [
-scaleFactor[0] * (coord[0] - this.boxSize / 2),
+boxSize[0] / this.meshSize * (coord[0] - this.meshSize / 2),
-scaleFactor[1] * (coord[1] - this.boxSize / 2),
+boxSize[1] / this.meshSize * (coord[1] - this.meshSize / 2),
 coord[2]
 ]);
 const coordsRotationMatrix = angle !== 0 ? buildRotationMatrix(angle, [0, 0]) : IDENTITY_MATRIX;
@@ -4024,11 +4020,17 @@ var Pipeline = class {
 const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
 const rotatedImage = tf4.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
 rotationMatrix = buildRotationMatrix(-angle, faceCenter);
+if (config3.face.mesh.enabled)
 face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, rotatedImage, [this.meshSize, this.meshSize]).div(255);
+else
+face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, rotatedImage, [this.boxSize, this.boxSize]).div(255);
 } else {
 rotationMatrix = IDENTITY_MATRIX;
-const cloned = input.clone();
+const clonedImage = input.clone();
-face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, cloned, [this.boxSize, this.boxSize]).div(255);
+if (config3.face.mesh.enabled)
+face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, clonedImage, [this.meshSize, this.meshSize]).div(255);
+else
+face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, clonedImage, [this.boxSize, this.boxSize]).div(255);
 }
 if (!config3.face.mesh.enabled) {
 const prediction2 = {
@@ -4070,7 +4072,7 @@ var Pipeline = class {
 rawCoords = rawCoords.concat(adjustedLeftIrisCoords).concat(adjustedRightIrisCoords);
 }
 const transformedCoordsData = this.transformRawCoords(rawCoords, box3, angle, rotationMatrix);
-const landmarksBox = enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));
+const landmarksBox = enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData), 1.5);
 const squarifiedLandmarksBox = squarifyBox(landmarksBox);
 const transformedCoords = tf4.tensor2d(transformedCoordsData);
 const prediction = {
@@ -4377,7 +4379,6 @@ async function predict4(image13, config3) {
 run("emotion", profileData);
 }
 }
-resize.dispose();
 resolve(data3);
 });
 }
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -75509,22 +75509,19 @@ return a / b;`;
 }
 var BlazeFaceModel = class {
 constructor(model7, config3) {
-this.blazeFaceModel = model7;
+this.model = model7;
-this.width = model7.inputs[0].shape[2];
-this.height = model7.inputs[0].shape[1];
 this.anchorsData = generateAnchors(model7.inputs[0].shape[1]);
 this.anchors = tensor2d(this.anchorsData);
-this.inputSize = tensor1d([this.width, this.height]);
+this.inputSize = model7.inputs[0].shape[2];
 this.config = config3;
-this.scaleFaces = 0.8;
 }
 async getBoundingBoxes(inputImage) {
 if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
 return null;
 const [batch, boxes, scores] = tidy(() => {
-const resizedImage = inputImage.resizeBilinear([this.width, this.height]);
+const resizedImage = inputImage.resizeBilinear([this.inputSize, this.inputSize]);
 const normalizedImage = resizedImage.div(127.5).sub(0.5);
-const batchedPrediction = this.blazeFaceModel.predict(normalizedImage);
+const batchedPrediction = this.model.predict(normalizedImage);
 let batchOut;
 if (Array.isArray(batchedPrediction)) {
 const sorted = batchedPrediction.sort((a, b) => a.size - b.size);
@@ -75535,7 +75532,7 @@ return a / b;`;
 } else {
 batchOut = batchedPrediction.squeeze();
 }
-const boxesOut = decodeBounds(batchOut, this.anchors, this.inputSize);
+const boxesOut = decodeBounds(batchOut, this.anchors, [this.inputSize, this.inputSize]);
 const logits = slice(batchOut, [0, 0], [-1, 1]);
 const scoresOut = sigmoid(logits).squeeze();
 return [batchOut, boxesOut, scoresOut];
@@ -75566,7 +75563,7 @@ return a / b;`;
 scores.dispose();
 return {
 boxes: annotatedBoxes,
-scaleFactor: [inputImage.shape[2] / this.width, inputImage.shape[1] / this.height]
+scaleFactor: [inputImage.shape[2] / this.inputSize, inputImage.shape[1] / this.inputSize]
 };
 }
 };
@@ -78998,8 +78995,8 @@ return a / b;`;
 this.boundingBoxDetector = boundingBoxDetector;
 this.meshDetector = meshDetector;
 this.irisModel = irisModel;
-this.boxSize = ((_a = boundingBoxDetector == null ? void 0 : boundingBoxDetector.blazeFaceModel) == null ? void 0 : _a.inputs[0].shape[2]) || 0;
+this.boxSize = ((_a = boundingBoxDetector == null ? void 0 : boundingBoxDetector.model) == null ? void 0 : _a.inputs[0].shape[2]) || 0;
-this.meshSize = (meshDetector == null ? void 0 : meshDetector.inputs[0].shape[2]) || ((_b = boundingBoxDetector == null ? void 0 : boundingBoxDetector.blazeFaceModel) == null ? void 0 : _b.inputs[0].shape[2]);
+this.meshSize = (meshDetector == null ? void 0 : meshDetector.inputs[0].shape[2]) || ((_b = boundingBoxDetector == null ? void 0 : boundingBoxDetector.model) == null ? void 0 : _b.inputs[0].shape[2]);
 this.irisSize = (irisModel == null ? void 0 : irisModel.inputs[0].shape[1]) || 0;
 this.irisEnlarge = 2.3;
 this.skipped = 0;
@@ -79007,10 +79004,9 @@ return a / b;`;
 }
 transformRawCoords(rawCoords, box3, angle, rotationMatrix) {
 const boxSize = getBoxSize({startPoint: box3.startPoint, endPoint: box3.endPoint});
-const scaleFactor = [boxSize[0] / this.meshSize, boxSize[1] / this.boxSize];
 const coordsScaled = rawCoords.map((coord) => [
-scaleFactor[0] * (coord[0] - this.boxSize / 2),
+boxSize[0] / this.meshSize * (coord[0] - this.meshSize / 2),
-scaleFactor[1] * (coord[1] - this.boxSize / 2),
+boxSize[1] / this.meshSize * (coord[1] - this.meshSize / 2),
 coord[2]
 ]);
 const coordsRotationMatrix = angle !== 0 ? buildRotationMatrix(angle, [0, 0]) : IDENTITY_MATRIX;
@@ -79123,11 +79119,17 @@ return a / b;`;
 const faceCenterNormalized = [faceCenter[0] / input2.shape[2], faceCenter[1] / input2.shape[1]];
 const rotatedImage = image.rotateWithOffset(input2, angle, 0, faceCenterNormalized);
 rotationMatrix = buildRotationMatrix(-angle, faceCenter);
+if (config3.face.mesh.enabled)
 face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, rotatedImage, [this.meshSize, this.meshSize]).div(255);
+else
+face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, rotatedImage, [this.boxSize, this.boxSize]).div(255);
 } else {
 rotationMatrix = IDENTITY_MATRIX;
-const cloned = input2.clone();
+const clonedImage = input2.clone();
-face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, cloned, [this.boxSize, this.boxSize]).div(255);
+if (config3.face.mesh.enabled)
+face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, clonedImage, [this.meshSize, this.meshSize]).div(255);
+else
+face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, clonedImage, [this.boxSize, this.boxSize]).div(255);
 }
 if (!config3.face.mesh.enabled) {
 const prediction2 = {
@@ -79169,7 +79171,7 @@ return a / b;`;
 rawCoords = rawCoords.concat(adjustedLeftIrisCoords).concat(adjustedRightIrisCoords);
 }
 const transformedCoordsData = this.transformRawCoords(rawCoords, box3, angle, rotationMatrix);
-const landmarksBox = enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));
+const landmarksBox = enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData), 1.5);
 const squarifiedLandmarksBox = squarifyBox(landmarksBox);
 const transformedCoords = tensor2d(transformedCoordsData);
 const prediction = {
@@ -79472,7 +79474,6 @@ return a / b;`;
 run("emotion", profileData);
 }
 }
-resize.dispose();
 resolve(data2);
 });
 }
File diff suppressed because one or more lines are too long
@@ -386,22 +386,19 @@ function decodeBounds(boxOutputs, anchors3, inputSize) {
 }
 var BlazeFaceModel = class {
 constructor(model6, config3) {
-this.blazeFaceModel = model6;
+this.model = model6;
-this.width = model6.inputs[0].shape[2];
-this.height = model6.inputs[0].shape[1];
 this.anchorsData = generateAnchors(model6.inputs[0].shape[1]);
 this.anchors = tf2.tensor2d(this.anchorsData);
-this.inputSize = tf2.tensor1d([this.width, this.height]);
+this.inputSize = model6.inputs[0].shape[2];
 this.config = config3;
-this.scaleFaces = 0.8;
 }
 async getBoundingBoxes(inputImage) {
 if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
 return null;
 const [batch, boxes, scores] = tf2.tidy(() => {
-const resizedImage = inputImage.resizeBilinear([this.width, this.height]);
+const resizedImage = inputImage.resizeBilinear([this.inputSize, this.inputSize]);
 const normalizedImage = resizedImage.div(127.5).sub(0.5);
-const batchedPrediction = this.blazeFaceModel.predict(normalizedImage);
+const batchedPrediction = this.model.predict(normalizedImage);
 let batchOut;
 if (Array.isArray(batchedPrediction)) {
 const sorted = batchedPrediction.sort((a, b) => a.size - b.size);
@@ -412,7 +409,7 @@ var BlazeFaceModel = class {
 } else {
 batchOut = batchedPrediction.squeeze();
 }
-const boxesOut = decodeBounds(batchOut, this.anchors, this.inputSize);
+const boxesOut = decodeBounds(batchOut, this.anchors, [this.inputSize, this.inputSize]);
 const logits = tf2.slice(batchOut, [0, 0], [-1, 1]);
 const scoresOut = tf2.sigmoid(logits).squeeze();
 return [batchOut, boxesOut, scoresOut];
@@ -443,7 +440,7 @@ var BlazeFaceModel = class {
 scores.dispose();
 return {
 boxes: annotatedBoxes,
-scaleFactor: [inputImage.shape[2] / this.width, inputImage.shape[1] / this.height]
+scaleFactor: [inputImage.shape[2] / this.inputSize, inputImage.shape[1] / this.inputSize]
 };
 }
 };
@@ -3879,8 +3876,8 @@ var Pipeline = class {
 this.boundingBoxDetector = boundingBoxDetector;
 this.meshDetector = meshDetector;
 this.irisModel = irisModel;
-this.boxSize = ((_a = boundingBoxDetector == null ? void 0 : boundingBoxDetector.blazeFaceModel) == null ? void 0 : _a.inputs[0].shape[2]) || 0;
+this.boxSize = ((_a = boundingBoxDetector == null ? void 0 : boundingBoxDetector.model) == null ? void 0 : _a.inputs[0].shape[2]) || 0;
-this.meshSize = (meshDetector == null ? void 0 : meshDetector.inputs[0].shape[2]) || ((_b = boundingBoxDetector == null ? void 0 : boundingBoxDetector.blazeFaceModel) == null ? void 0 : _b.inputs[0].shape[2]);
+this.meshSize = (meshDetector == null ? void 0 : meshDetector.inputs[0].shape[2]) || ((_b = boundingBoxDetector == null ? void 0 : boundingBoxDetector.model) == null ? void 0 : _b.inputs[0].shape[2]);
 this.irisSize = (irisModel == null ? void 0 : irisModel.inputs[0].shape[1]) || 0;
 this.irisEnlarge = 2.3;
 this.skipped = 0;
@@ -3888,10 +3885,9 @@ var Pipeline = class {
 }
 transformRawCoords(rawCoords, box3, angle, rotationMatrix) {
 const boxSize = getBoxSize({startPoint: box3.startPoint, endPoint: box3.endPoint});
-const scaleFactor = [boxSize[0] / this.meshSize, boxSize[1] / this.boxSize];
 const coordsScaled = rawCoords.map((coord) => [
-scaleFactor[0] * (coord[0] - this.boxSize / 2),
+boxSize[0] / this.meshSize * (coord[0] - this.meshSize / 2),
-scaleFactor[1] * (coord[1] - this.boxSize / 2),
+boxSize[1] / this.meshSize * (coord[1] - this.meshSize / 2),
 coord[2]
 ]);
 const coordsRotationMatrix = angle !== 0 ? buildRotationMatrix(angle, [0, 0]) : IDENTITY_MATRIX;
@@ -4004,11 +4000,17 @@ var Pipeline = class {
 const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
 const rotatedImage = tf4.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
 rotationMatrix = buildRotationMatrix(-angle, faceCenter);
+if (config3.face.mesh.enabled)
 face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, rotatedImage, [this.meshSize, this.meshSize]).div(255);
+else
+face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, rotatedImage, [this.boxSize, this.boxSize]).div(255);
 } else {
 rotationMatrix = IDENTITY_MATRIX;
-const cloned = input.clone();
+const clonedImage = input.clone();
-face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, cloned, [this.boxSize, this.boxSize]).div(255);
+if (config3.face.mesh.enabled)
+face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, clonedImage, [this.meshSize, this.meshSize]).div(255);
+else
+face4 = cutBoxFromImageAndResize({startPoint: box3.startPoint, endPoint: box3.endPoint}, clonedImage, [this.boxSize, this.boxSize]).div(255);
 }
 if (!config3.face.mesh.enabled) {
 const prediction2 = {
@@ -4050,7 +4052,7 @@ var Pipeline = class {
 rawCoords = rawCoords.concat(adjustedLeftIrisCoords).concat(adjustedRightIrisCoords);
 }
 const transformedCoordsData = this.transformRawCoords(rawCoords, box3, angle, rotationMatrix);
-const landmarksBox = enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));
+const landmarksBox = enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData), 1.5);
 const squarifiedLandmarksBox = squarifyBox(landmarksBox);
 const transformedCoords = tf4.tensor2d(transformedCoordsData);
 const prediction = {
@@ -4357,7 +4359,6 @@ async function predict4(image13, config3) {
 run("emotion", profileData);
 }
 }
-resize.dispose();
 resolve(data2);
 });
 }
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -52,34 +52,28 @@ function decodeBounds(boxOutputs, anchors, inputSize) {
|
||||||
}
|
}
|
||||||
|
|
||||||
 export class BlazeFaceModel {
-  blazeFaceModel: any;
-  width: number;
-  height: number;
+  model: any;
   anchorsData: any;
   anchors: any;
-  inputSize: any;
+  inputSize: number;
   config: any;
-  scaleFaces: number;

   constructor(model, config) {
-    this.blazeFaceModel = model;
-    this.width = model.inputs[0].shape[2];
-    this.height = model.inputs[0].shape[1];
+    this.model = model;
     this.anchorsData = generateAnchors(model.inputs[0].shape[1]);
     this.anchors = tf.tensor2d(this.anchorsData);
-    this.inputSize = tf.tensor1d([this.width, this.height]);
+    this.inputSize = model.inputs[0].shape[2];
     this.config = config;
-    this.scaleFaces = 0.8;
   }

   async getBoundingBoxes(inputImage) {
     // sanity check on input
     if ((!inputImage) || (inputImage.isDisposedInternal) || (inputImage.shape.length !== 4) || (inputImage.shape[1] < 1) || (inputImage.shape[2] < 1)) return null;
     const [batch, boxes, scores] = tf.tidy(() => {
-      const resizedImage = inputImage.resizeBilinear([this.width, this.height]);
+      const resizedImage = inputImage.resizeBilinear([this.inputSize, this.inputSize]);
       // const normalizedImage = tf.mul(tf.sub(resizedImage.div(255), 0.5), 2);
       const normalizedImage = resizedImage.div(127.5).sub(0.5);
-      const batchedPrediction = this.blazeFaceModel.predict(normalizedImage);
+      const batchedPrediction = this.model.predict(normalizedImage);
       let batchOut;
       // are we using tfhub or pinto converted model?
       if (Array.isArray(batchedPrediction)) {
@@ -91,7 +85,7 @@ export class BlazeFaceModel {
       } else {
         batchOut = batchedPrediction.squeeze(); // when using tfhub model
       }
-      const boxesOut = decodeBounds(batchOut, this.anchors, this.inputSize);
+      const boxesOut = decodeBounds(batchOut, this.anchors, [this.inputSize, this.inputSize]);
       const logits = tf.slice(batchOut, [0, 0], [-1, 1]);
       const scoresOut = tf.sigmoid(logits).squeeze();
       return [batchOut, boxesOut, scoresOut];
@@ -123,7 +117,7 @@ export class BlazeFaceModel {
     scores.dispose();
     return {
       boxes: annotatedBoxes,
-      scaleFactor: [inputImage.shape[2] / this.width, inputImage.shape[1] / this.height],
+      scaleFactor: [inputImage.shape[2] / this.inputSize, inputImage.shape[1] / this.inputSize],
     };
   }
 }

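Note on the change above: BlazeFace takes a square input, so the separate width and height fields collapse into a single inputSize, and a detected box is mapped back to the source image with one scale factor per axis (scaleFactor as returned by getBoundingBoxes). A minimal sketch of that mapping, for illustration only (the helper name and box shape are hypothetical, not the library API):

  // scaleFactor = [imageWidth / inputSize, imageHeight / inputSize]
  function scaleBoxToImage(box: { startPoint: [number, number]; endPoint: [number, number] }, scaleFactor: [number, number]) {
    const scale = (p: [number, number]): [number, number] => [p[0] * scaleFactor[0], p[1] * scaleFactor[1]];
    return { startPoint: scale(box.startPoint), endPoint: scale(box.endPoint) };
  }
  // e.g. a 1280x720 frame with inputSize 256 gives scaleFactor [5, 2.8125]
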
@@ -56,8 +56,8 @@ export class Pipeline {
     this.boundingBoxDetector = boundingBoxDetector;
     this.meshDetector = meshDetector;
     this.irisModel = irisModel;
-    this.boxSize = boundingBoxDetector?.blazeFaceModel?.inputs[0].shape[2] || 0;
-    this.meshSize = meshDetector?.inputs[0].shape[2] || boundingBoxDetector?.blazeFaceModel?.inputs[0].shape[2];
+    this.boxSize = boundingBoxDetector?.model?.inputs[0].shape[2] || 0;
+    this.meshSize = meshDetector?.inputs[0].shape[2] || boundingBoxDetector?.model?.inputs[0].shape[2];
     this.irisSize = irisModel?.inputs[0].shape[1] || 0;
     this.irisEnlarge = 2.3;
     this.skipped = 0;
@@ -66,10 +66,10 @@ export class Pipeline {

   transformRawCoords(rawCoords, box, angle, rotationMatrix) {
     const boxSize = bounding.getBoxSize({ startPoint: box.startPoint, endPoint: box.endPoint });
-    const scaleFactor = [boxSize[0] / this.meshSize, boxSize[1] / this.boxSize];
     const coordsScaled = rawCoords.map((coord) => ([
-      scaleFactor[0] * (coord[0] - this.boxSize / 2),
-      scaleFactor[1] * (coord[1] - this.boxSize / 2), coord[2],
+      boxSize[0] / this.meshSize * (coord[0] - this.meshSize / 2),
+      boxSize[1] / this.meshSize * (coord[1] - this.meshSize / 2),
+      coord[2],
     ]));
     const coordsRotationMatrix = (angle !== 0) ? util.buildRotationMatrix(angle, [0, 0]) : util.IDENTITY_MATRIX;
     const coordsRotated = (angle !== 0) ? coordsScaled.map((coord) => ([...util.rotatePoint(coord, coordsRotationMatrix), coord[2]])) : coordsScaled;
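
Note on transformRawCoords above: the intermediate scaleFactor variable is gone; each raw mesh coordinate (in 0..meshSize model space) is re-centered on the mesh input and scaled by boxSize / meshSize per axis, with z passed through. A minimal sketch of just that step, for illustration only (the helper name is hypothetical):

  // boxSize: [width, height] of the face box in image pixels; meshSize: square input size of the mesh model
  function scaleRawCoord(coord: [number, number, number], boxSize: [number, number], meshSize: number): [number, number, number] {
    return [
      boxSize[0] / meshSize * (coord[0] - meshSize / 2), // x, relative to the box center
      boxSize[1] / meshSize * (coord[1] - meshSize / 2), // y, relative to the box center
      coord[2],                                          // z is passed through unchanged
    ];
  }
  // e.g. meshSize 192 and boxSize [96, 96]: raw [192, 96, 0] maps to [48, 0, 0]
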
@@ -185,6 +185,7 @@ export class Pipeline {
     let face;
     let angle = 0;
     let rotationMatrix;

     if (config.face.detector.rotation && config.face.mesh.enabled && tf.ENV.flags.IS_BROWSER) {
       const [indexOfMouth, indexOfForehead] = (box.landmarks.length >= LANDMARKS_COUNT) ? MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES : BLAZEFACE_KEYPOINTS_LINE_OF_SYMMETRY_INDICES;
       angle = util.computeRotation(box.landmarks[indexOfMouth], box.landmarks[indexOfForehead]);
@@ -192,11 +193,13 @@ export class Pipeline {
       const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
       const rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized); // rotateWithOffset is not defined for tfjs-node
       rotationMatrix = util.buildRotationMatrix(-angle, faceCenter);
-      face = bounding.cutBoxFromImageAndResize({ startPoint: box.startPoint, endPoint: box.endPoint }, rotatedImage, [this.meshSize, this.meshSize]).div(255);
+      if (config.face.mesh.enabled) face = bounding.cutBoxFromImageAndResize({ startPoint: box.startPoint, endPoint: box.endPoint }, rotatedImage, [this.meshSize, this.meshSize]).div(255);
+      else face = bounding.cutBoxFromImageAndResize({ startPoint: box.startPoint, endPoint: box.endPoint }, rotatedImage, [this.boxSize, this.boxSize]).div(255);
     } else {
       rotationMatrix = util.IDENTITY_MATRIX;
-      const cloned = input.clone();
-      face = bounding.cutBoxFromImageAndResize({ startPoint: box.startPoint, endPoint: box.endPoint }, cloned, [this.boxSize, this.boxSize]).div(255);
+      const clonedImage = input.clone();
+      if (config.face.mesh.enabled) face = bounding.cutBoxFromImageAndResize({ startPoint: box.startPoint, endPoint: box.endPoint }, clonedImage, [this.meshSize, this.meshSize]).div(255);
+      else face = bounding.cutBoxFromImageAndResize({ startPoint: box.startPoint, endPoint: box.endPoint }, clonedImage, [this.boxSize, this.boxSize]).div(255);
     }

     // if we're not going to produce mesh, don't spend time with further processing
@@ -244,7 +247,7 @@ export class Pipeline {
     }

     const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
-    const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));
+    const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData), 1.5);
     const squarifiedLandmarksBox = bounding.squarifyBox(landmarksBox);
     const transformedCoords = tf.tensor2d(transformedCoordsData);
     const prediction = {

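Note on the last hunk above: the landmark bounding box is now enlarged with an explicit factor of 1.5 instead of relying on whatever default enlargeBox applies. For illustration only, enlarging a box around its center by a factor f looks roughly like this (hypothetical helper, not the repo's enlargeBox implementation):

  function enlargeAroundCenter(start: [number, number], end: [number, number], f: number) {
    const center: [number, number] = [(start[0] + end[0]) / 2, (start[1] + end[1]) / 2];
    const half: [number, number] = [f * (end[0] - start[0]) / 2, f * (end[1] - start[1]) / 2];
    return {
      startPoint: [center[0] - half[0], center[1] - half[1]] as [number, number],
      endPoint: [center[0] + half[0], center[1] + half[1]] as [number, number],
    };
  }
  // enlargeAroundCenter([0, 0], [100, 100], 1.5) -> { startPoint: [-25, -25], endPoint: [125, 125] }
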
@@ -30,8 +30,9 @@ export function simmilarity(embedding1, embedding2) {
 export async function predict(image, config) {
   if (!model) return null;
   return new Promise(async (resolve) => {
-    const resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
-    // const normalize = tf.tidy(() => resize.div(127.5).sub(0.5)); // this is -0.5...0.5 ???
+    const resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false); // input is already normalized to 0..1
+    // const mean = resize.mean();
+    // const whiten = resize.sub(mean); // normalizes with mean value being at point 0
     let data: Array<[]> = [];
     if (config.face.embedding.enabled) {
       if (!config.profile) {
@@ -45,8 +46,6 @@ export async function predict(image, config) {
         profile.run('emotion', profileData);
       }
     }
-    resize.dispose();
-    // normalize.dispose();
     resolve(data);
   });
 }

@@ -1,13 +1,10 @@
 export declare const disposeBox: (box: any) => void;
 export declare class BlazeFaceModel {
-    blazeFaceModel: any;
-    width: number;
-    height: number;
+    model: any;
     anchorsData: any;
     anchors: any;
-    inputSize: any;
+    inputSize: number;
     config: any;
-    scaleFaces: number;
     constructor(model: any, config: any);
     getBoundingBoxes(inputImage: any): Promise<{
         boxes: {