Option to return raw data (mesh, box) for Facemesh / "preserve aspect ratio" fix from Facemesh upstream

pull/70/head
ButzYung 2020-12-22 21:36:01 +08:00 committed by Vladimir Mandic
parent 53d4880aaa
commit 95bbeb1146
3 changed files with 24 additions and 2 deletions

View File

@@ -16,6 +16,8 @@ class MediaPipeFaceMesh {
for (const prediction of (predictions || [])) {
if (prediction.isDisposedInternal) continue; // guard against disposed tensors on long running operations such as pause in middle of processing
const mesh = prediction.coords ? prediction.coords.arraySync() : null;
// AT: mesh_raw
const mesh_raw = prediction.rawCoords;
const annotations = {};
if (mesh && mesh.length > 0) {
for (let key = 0; key < coords.MESH_ANNOTATIONS.length; key++) {
@@ -24,16 +26,23 @@ class MediaPipeFaceMesh {
}
}
}
// AT: raw version of box, the same as the TFJS Facemesh output version (.boundingBox)
const box_raw = (config.face.mesh.requestRawData && prediction.box) ? {topLeft: prediction.box.startPoint, bottomRight: prediction.box.endPoint} : null;
const box = prediction.box ? [
Math.max(0, prediction.box.startPoint[0]),
Math.max(0, prediction.box.startPoint[1]),
Math.min(input.shape[2], prediction.box.endPoint[0]) - prediction.box.startPoint[0],
Math.min(input.shape[1], prediction.box.endPoint[1]) - prediction.box.startPoint[1],
] : 0;
results.push({
confidence: prediction.confidence || 0,
box,
mesh,
// AT: box_raw, mesh_raw
box_raw,
mesh_raw,
annotations,
image: prediction.image ? tf.clone(prediction.image) : null,
});

View File

@@ -157,9 +157,13 @@ class Pipeline {
for (let i = 0; i < this.storedBoxes.length; i++) {
const scaledBox = bounding.scaleBoxCoordinates({ startPoint: this.storedBoxes[i].startPoint, endPoint: this.storedBoxes[i].endPoint }, detector.scaleFactor);
const enlargedBox = bounding.enlargeBox(scaledBox);
// AT: preserve aspect ratio, pulled from Facemesh upstream (https://github.com/tensorflow/tfjs-models/commit/85e6e487cc4bd21f0707a509e5024484a0798aa0)
const squarifiedBox = bounding.squarifyBox(enlargedBox);
const landmarks = this.storedBoxes[i].landmarks.arraySync();
const confidence = this.storedBoxes[i].confidence;
this.storedBoxes[i] = { ...enlargedBox, confidence, landmarks };
// AT: preserve aspect ratio, pulled from Facemesh upstream
this.storedBoxes[i] = { ...squarifiedBox, confidence, landmarks };
// this.storedBoxes[i] = { ...enlargedBox, confidence, landmarks };
}
this.runsWithoutFaceDetector = 0;
}
@@ -235,15 +239,21 @@ class Pipeline {
const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));
// AT: preserve aspect ratio, pulled from Facemesh upstream
const squarifiedLandmarksBox = bounding.squarifyBox(landmarksBox);
const transformedCoords = tf.tensor2d(transformedCoordsData);
const prediction = {
coords: transformedCoords,
// AT: rawCoords
rawCoords: (config.face.mesh.requestRawData) ? rawCoords : null,
box: landmarksBox,
faceConfidence: confidenceVal,
confidence: box.confidence,
image: face,
};
this.storedBoxes[i] = { ...landmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal };
// AT: preserve aspect ratio, pulled from Facemesh upstream
this.storedBoxes[i] = { ...squarifiedLandmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal };
// this.storedBoxes[i] = { ...landmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal };
return prediction;
}));

View File

@@ -291,6 +291,9 @@ class Human {
confidence: face.confidence,
box: face.box,
mesh: face.mesh,
// AT: box_raw, mesh_raw
box_raw: face.box_raw,
mesh_raw: face.mesh_raw,
annotations: face.annotations,
age: ageRes.age,
gender: genderRes.gender,