Option to return raw data (mesh, box) for Facemesh / "preserve aspect ratio" fix from Facemesh upstream

pull/70/head
ButzYung 2020-12-22 21:36:01 +08:00 committed by Vladimir Mandic
parent 53d4880aaa
commit 95bbeb1146
3 changed files with 24 additions and 2 deletions

View File

@@ -16,6 +16,8 @@ class MediaPipeFaceMesh {
for (const prediction of (predictions || [])) { for (const prediction of (predictions || [])) {
if (prediction.isDisposedInternal) continue; // guard against disposed tensors on long running operations such as pause in middle of processing if (prediction.isDisposedInternal) continue; // guard against disposed tensors on long running operations such as pause in middle of processing
const mesh = prediction.coords ? prediction.coords.arraySync() : null; const mesh = prediction.coords ? prediction.coords.arraySync() : null;
// AT: mesh_raw
const mesh_raw = prediction.rawCoords;
const annotations = {}; const annotations = {};
if (mesh && mesh.length > 0) { if (mesh && mesh.length > 0) {
for (let key = 0; key < coords.MESH_ANNOTATIONS.length; key++) { for (let key = 0; key < coords.MESH_ANNOTATIONS.length; key++) {
@@ -24,16 +26,23 @@ class MediaPipeFaceMesh {
} }
} }
} }
// AT: raw version of box, the same as the TFJS Facemesh output version (.boundingBox)
const box_raw = (config.face.mesh.requestRawData && prediction.box) ? {topLeft: prediction.box.startPoint, bottomRight: prediction.box.endPoint} : null;
const box = prediction.box ? [ const box = prediction.box ? [
Math.max(0, prediction.box.startPoint[0]), Math.max(0, prediction.box.startPoint[0]),
Math.max(0, prediction.box.startPoint[1]), Math.max(0, prediction.box.startPoint[1]),
Math.min(input.shape[2], prediction.box.endPoint[0]) - prediction.box.startPoint[0], Math.min(input.shape[2], prediction.box.endPoint[0]) - prediction.box.startPoint[0],
Math.min(input.shape[1], prediction.box.endPoint[1]) - prediction.box.startPoint[1], Math.min(input.shape[1], prediction.box.endPoint[1]) - prediction.box.startPoint[1],
] : 0; ] : 0;
results.push({ results.push({
confidence: prediction.confidence || 0, confidence: prediction.confidence || 0,
box, box,
mesh, mesh,
// AT: box_raw, mesh_raw
box_raw,
mesh_raw,
annotations, annotations,
image: prediction.image ? tf.clone(prediction.image) : null, image: prediction.image ? tf.clone(prediction.image) : null,
}); });

View File

@@ -157,9 +157,13 @@ class Pipeline {
for (let i = 0; i < this.storedBoxes.length; i++) { for (let i = 0; i < this.storedBoxes.length; i++) {
const scaledBox = bounding.scaleBoxCoordinates({ startPoint: this.storedBoxes[i].startPoint, endPoint: this.storedBoxes[i].endPoint }, detector.scaleFactor); const scaledBox = bounding.scaleBoxCoordinates({ startPoint: this.storedBoxes[i].startPoint, endPoint: this.storedBoxes[i].endPoint }, detector.scaleFactor);
const enlargedBox = bounding.enlargeBox(scaledBox); const enlargedBox = bounding.enlargeBox(scaledBox);
// AT: preserve aspect ratio, pulled from Facemesh upstream (https://github.com/tensorflow/tfjs-models/commit/85e6e487cc4bd21f0707a509e5024484a0798aa0)
const squarifiedBox = bounding.squarifyBox(enlargedBox);
const landmarks = this.storedBoxes[i].landmarks.arraySync(); const landmarks = this.storedBoxes[i].landmarks.arraySync();
const confidence = this.storedBoxes[i].confidence; const confidence = this.storedBoxes[i].confidence;
this.storedBoxes[i] = { ...enlargedBox, confidence, landmarks }; // AT: preserve aspect ratio, pulled from Facemesh upstream
this.storedBoxes[i] = { ...squarifiedBox, confidence, landmarks };
// this.storedBoxes[i] = { ...enlargedBox, confidence, landmarks };
} }
this.runsWithoutFaceDetector = 0; this.runsWithoutFaceDetector = 0;
} }
@@ -235,15 +239,21 @@ class Pipeline {
const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix); const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData)); const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));
// AT: preserve aspect ratio, pulled from Facemesh upstream
const squarifiedLandmarksBox = bounding.squarifyBox(landmarksBox);
const transformedCoords = tf.tensor2d(transformedCoordsData); const transformedCoords = tf.tensor2d(transformedCoordsData);
const prediction = { const prediction = {
coords: transformedCoords, coords: transformedCoords,
// AT: rawCoords
rawCoords: (config.face.mesh.requestRawData) ? rawCoords : null,
box: landmarksBox, box: landmarksBox,
faceConfidence: confidenceVal, faceConfidence: confidenceVal,
confidence: box.confidence, confidence: box.confidence,
image: face, image: face,
}; };
this.storedBoxes[i] = { ...landmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal }; // AT: preserve aspect ratio, pulled from Facemesh upstream
this.storedBoxes[i] = { ...squarifiedLandmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal };
// this.storedBoxes[i] = { ...landmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal };
return prediction; return prediction;
})); }));

View File

@@ -291,6 +291,9 @@ class Human {
confidence: face.confidence, confidence: face.confidence,
box: face.box, box: face.box,
mesh: face.mesh, mesh: face.mesh,
// AT: box_raw, mesh_raw
box_raw: face.box_raw,
mesh_raw: face.mesh_raw,
annotations: face.annotations, annotations: face.annotations,
age: ageRes.age, age: ageRes.age,
gender: genderRes.gender, gender: genderRes.gender,