mirror of https://github.com/vladmandic/human

fix centernet & update blazeface

commit 3adbbe152b
parent d1ec2d9f14
@@ -9,7 +9,7 @@ Repository: **<git+https://github.com/vladmandic/human.git>**

 ## Changelog

-### **HEAD -> main** 2021/08/06 mandic00@live.com
+### **HEAD -> main** 2021/08/09 mandic00@live.com

 - minor update
 - replace movenet with lightning-v4
@@ -40,15 +40,15 @@ let userConfig = {
     enabled: false,
     flip: false,
   },
-  face: { enabled: false,
-    detector: { return: true },
+  face: { enabled: true,
+    detector: { return: false },
     mesh: { enabled: true },
     iris: { enabled: false },
     description: { enabled: false },
     emotion: { enabled: false },
   },
   object: { enabled: false },
-  gesture: { enabled: true },
+  gesture: { enabled: false },
   hand: { enabled: false },
   body: { enabled: false },
   // body: { enabled: true, modelPath: 'posenet.json' },
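For orientation, the hunk above leaves the demo's userConfig with face detection and mesh enabled and everything else turned off; detector.return is now false, so the cropped face tensor is not returned in results. The sketch below is not part of the diff: it only restates the resulting configuration, and the constructor call is the library's usual usage pattern rather than a line copied from the demo file.

const userConfig = {
  face: {
    enabled: true,
    detector: { return: false },  // do not return the cropped face tensor
    mesh: { enabled: true },
    iris: { enabled: false },
    description: { enabled: false },
    emotion: { enabled: false },
  },
  object: { enabled: false },
  gesture: { enabled: false },
  hand: { enabled: false },
  body: { enabled: false },
};
// typical construction in the demo (illustrative): const human = new Human(userConfig);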
@@ -164,7 +164,8 @@ function status(msg) {
     div.innerText = msg;
   } else {
     const video = document.getElementById('video');
-    document.getElementById('play').style.display = (video.srcObject !== null) && !video.paused ? 'none' : 'block';
+    const playing = (video.srcObject !== null) && !video.paused;
+    document.getElementById('play').style.display = playing ? 'none' : 'block';
     document.getElementById('loader').style.display = 'none';
     div.innerText = '';
   }
@@ -259,9 +260,10 @@ async function drawResults(input) {
   const avgDraw = ui.drawFPS.length > 0 ? Math.trunc(10 * ui.drawFPS.reduce((a, b) => a + b, 0) / ui.drawFPS.length) / 10 : 0;
   const warning = (ui.detectFPS.length > 5) && (avgDetect < 2) ? '<font color="lightcoral">warning: your performance is low: try switching to higher performance backend, lowering resolution or disabling some models</font>' : '';
   const fps = avgDetect > 0 ? `FPS process:${avgDetect} refresh:${avgDraw}` : '';
+  const backend = engine.state.numTensors > 0 ? `backend: ${human.tf.getBackend()} | ${memory}` : 'running in web worker';
   document.getElementById('log').innerHTML = `
     video: ${ui.camera.name} | facing: ${ui.camera.facing} | screen: ${window.innerWidth} x ${window.innerHeight} camera: ${ui.camera.width} x ${ui.camera.height} ${processing}<br>
-    backend: ${human.tf.getBackend()} | ${memory}<br>
+    backend: ${backend}<br>
     performance: ${str(lastDetectedResult.performance)}ms ${fps}<br>
     ${warning}<br>
   `;
@@ -363,6 +365,7 @@ async function setupCamera() {
       // eslint-disable-next-line no-use-before-define
       if (live && !ui.detectThread) runHumanDetect(video, canvas);
       ui.busy = false;
+      status();
       resolve();
     };
   });
@@ -597,6 +600,7 @@ async function detectVideo() {
     document.getElementById('btnStartText').innerHTML = 'pause video';
     await video.play();
     runHumanDetect(video, canvas);
+    status();
   } else {
     status(cameraError);
   }
@@ -878,6 +882,7 @@ async function pwaRegister() {
 }

 async function main() {
+  /*
   window.addEventListener('unhandledrejection', (evt) => {
     // eslint-disable-next-line no-console
     console.error(evt.reason || evt);

@@ -885,6 +890,7 @@ async function main() {
     status('exception error');
     evt.preventDefault();
   });
+  */

   log('demo starting ...');

File diff suppressed because one or more lines are too long
@@ -517,7 +517,7 @@ var BlazeFaceModel = class {
     });
     this.config = mergeDeep(this.config, userConfig);
     const nmsTensor = await tf3.image.nonMaxSuppressionAsync(boxes, scores, this.config.face.detector.maxDetected, this.config.face.detector.iouThreshold, this.config.face.detector.minConfidence);
-    const nms = nmsTensor.arraySync();
+    const nms = await nmsTensor.array();
     tf3.dispose(nmsTensor);
     const annotatedBoxes = [];
     for (let i = 0; i < nms.length; i++) {
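This hunk shows the change that repeats throughout the bundled and source files in this commit: synchronous tensor downloads (arraySync, dataSync) are replaced with their awaited counterparts (array, data). A minimal sketch of the difference follows; it is illustrative, not part of the diff, and assumes tf is the imported TensorFlow.js namespace and the code runs inside an async function.

const t = tf.tensor2d([[1, 2], [3, 4]]);
const blocking = t.arraySync();      // stalls the thread until the backend readback finishes
const nonBlocking = await t.array(); // yields to the event loop while the values download
tf.dispose(t);                       // readback does not dispose the tensor; do it explicitly

On GPU and WASM backends the awaited form keeps the main thread responsive while values are read back, which fits the async sweep in this commit.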
@@ -3955,7 +3955,10 @@ var Pipeline = class {
       this.storedBoxes = [];
       this.detectedFaces = 0;
       for (const possible of detector.boxes) {
-        this.storedBoxes.push({ startPoint: possible.box.startPoint.dataSync(), endPoint: possible.box.endPoint.dataSync(), landmarks: possible.landmarks.arraySync(), confidence: possible.confidence });
+        const startPoint = await possible.box.startPoint.data();
+        const endPoint = await possible.box.endPoint.data();
+        const landmarks = await possible.landmarks.array();
+        this.storedBoxes.push({ startPoint, endPoint, landmarks, confidence: possible.confidence });
       }
       if (this.storedBoxes.length > 0)
         useFreshBox = true;
@@ -4118,7 +4121,6 @@ async function predict(input, config3) {
       mesh: prediction.mesh,
       meshRaw,
       annotations: annotations3,
-      image: prediction.image,
       tensor: prediction.image
     });
     if (prediction.coords)
@@ -7707,7 +7709,7 @@ var HandDetector = class {
     const boxes = this.normalizeBoxes(rawBoxes);
     tf10.dispose(rawBoxes);
     const filteredT = await tf10.image.nonMaxSuppressionAsync(boxes, scores, config3.hand.maxDetected, config3.hand.iouThreshold, config3.hand.minConfidence);
-    const filtered = filteredT.arraySync();
+    const filtered = await filteredT.array();
     tf10.dispose(scoresT);
     tf10.dispose(filteredT);
     const hands = [];
@@ -7737,7 +7739,7 @@ var HandDetector = class {
       const boxes = prediction.box.dataSync();
       const startPoint = boxes.slice(0, 2);
       const endPoint = boxes.slice(2, 4);
-      const palmLandmarks = prediction.palmLandmarks.arraySync();
+      const palmLandmarks = await prediction.palmLandmarks.array();
       tf10.dispose(prediction.box);
       tf10.dispose(prediction.palmLandmarks);
       hands.push(scaleBoxCoordinates2({ startPoint, endPoint, palmLandmarks, confidence: prediction.confidence }, [inputWidth / this.inputSize, inputHeight / this.inputSize]));
@@ -7911,7 +7913,7 @@ var HandPipeline = class {
       tf11.dispose(confidenceT);
       if (confidence >= config3.hand.minConfidence) {
         const keypointsReshaped = tf11.reshape(keypoints3, [-1, 3]);
-        const rawCoords = keypointsReshaped.arraySync();
+        const rawCoords = await keypointsReshaped.array();
         tf11.dispose(keypoints3);
         tf11.dispose(keypointsReshaped);
         const coords3 = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);
@@ -8310,7 +8312,7 @@ async function predict8(image18, config3) {
   tf15.dispose(tensor2);
   if (resT) {
     keypoints2.length = 0;
-    const res = resT.arraySync();
+    const res = await resT.array();
     tf15.dispose(resT);
     const kpt3 = res[0][0];
     for (let id = 0; id < kpt3.length; id++) {
@@ -8463,14 +8465,14 @@ async function process2(res, inputSize, outputShape, config3) {
   let id = 0;
   let results = [];
   for (const strideSize of [1, 2, 4]) {
-    tf16.tidy(() => {
+    tf16.tidy(async () => {
       var _a, _b;
       const baseSize = strideSize * 13;
       const scoresT = (_a = res.find((a) => a.shape[1] === baseSize ** 2 && a.shape[2] === labels.length)) == null ? void 0 : _a.squeeze();
       const featuresT = (_b = res.find((a) => a.shape[1] === baseSize ** 2 && a.shape[2] < labels.length)) == null ? void 0 : _b.squeeze();
       const boxesMax = featuresT.reshape([-1, 4, featuresT.shape[1] / 4]);
-      const boxIdx = boxesMax.argMax(2).arraySync();
-      const scores = scoresT.arraySync();
+      const boxIdx = await boxesMax.argMax(2).array();
+      const scores = await scoresT.array();
       for (let i = 0; i < scoresT.shape[0]; i++) {
         for (let j = 0; j < scoresT.shape[1]; j++) {
           const score3 = scores[i][j];
@@ -8567,15 +8569,15 @@ async function process3(res, inputSize, outputShape, config3) {
   if (!res)
     return [];
   const results = [];
-  const detections = res.arraySync();
+  const detections = await res.array();
   const squeezeT = tf17.squeeze(res);
   tf17.dispose(res);
   const arr = tf17.split(squeezeT, 6, 1);
   tf17.dispose(squeezeT);
   const stackT = tf17.stack([arr[1], arr[0], arr[3], arr[2]], 1);
-  const boxesT = stackT.squeeze();
-  const scoresT = arr[4].squeeze();
-  const classesT = arr[5].squeeze();
+  const boxesT = tf17.squeeze(stackT);
+  const scoresT = tf17.squeeze(arr[4]);
+  const classesT = tf17.squeeze(arr[5]);
   arr.forEach((t) => tf17.dispose(t));
   const nmsT = await tf17.image.nonMaxSuppressionAsync(boxesT, scoresT, config3.object.maxDetected, config3.object.iouThreshold, config3.object.minConfidence);
   tf17.dispose(boxesT);
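Besides the awaited readback, the CenterNet post-processing above switches from chained tensor methods to the functional tf.squeeze() form. Both create a new tensor that must be disposed; the small sketch below is illustrative, not part of the diff, and assumes tf is the TensorFlow.js namespace.

const stacked = tf.tensor3d([[[1], [2]], [[3], [4]]]); // shape [2, 2, 1]
const chained = stacked.squeeze();       // chained form used before this commit
const functional = tf.squeeze(stacked);  // functional form used after; identical result, shape [2, 2]
tf.dispose([stacked, chained, functional]); // both results are new tensors and still need disposal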
@@ -9765,28 +9767,28 @@ var detectFace = async (parent, input) => {
     return [];
   for (let i = 0; i < faces.length; i++) {
     parent.analyze("Get Face");
-    if (!faces[i].image || faces[i].image["isDisposedInternal"]) {
-      log("Face object is disposed:", faces[i].image);
+    if (!faces[i].tensor || faces[i].tensor["isDisposedInternal"]) {
+      log("Face object is disposed:", faces[i].tensor);
       continue;
     }
     const rotation = calculateFaceAngle(faces[i], [input.shape[2], input.shape[1]]);
     parent.analyze("Start Emotion:");
     if (parent.config.async) {
-      emotionRes = parent.config.face.emotion.enabled ? predict3(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : {};
+      emotionRes = parent.config.face.emotion.enabled ? predict3(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : {};
     } else {
       parent.state = "run:emotion";
       timeStamp = now();
-      emotionRes = parent.config.face.emotion.enabled ? await predict3(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : {};
+      emotionRes = parent.config.face.emotion.enabled ? await predict3(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : {};
       parent.performance.emotion = Math.trunc(now() - timeStamp);
     }
     parent.analyze("End Emotion:");
     parent.analyze("Start Description:");
     if (parent.config.async) {
-      descRes = parent.config.face.description.enabled ? predict2(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : [];
+      descRes = parent.config.face.description.enabled ? predict2(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : [];
     } else {
       parent.state = "run:description";
       timeStamp = now();
-      descRes = parent.config.face.description.enabled ? await predict2(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : [];
+      descRes = parent.config.face.description.enabled ? await predict2(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : [];
       parent.performance.embedding = Math.trunc(now() - timeStamp);
     }
     parent.analyze("End Description:");
@@ -9799,6 +9801,10 @@ var detectFace = async (parent, input) => {
       delete faces[i].annotations.rightEyeIris;
     }
     const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
+    const tensor2 = parent.config.face.detector.return ? tf20.squeeze(faces[i].tensor) : null;
+    tf20.dispose(faces[i].tensor);
+    if (faces[i].tensor)
+      delete faces[i].tensor;
     faceRes.push({
       ...faces[i],
       id: i,
@@ -9809,11 +9815,8 @@ var detectFace = async (parent, input) => {
       emotion: emotionRes,
       iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,
       rotation,
-      tensor: parent.config.face.detector.return ? tf20.squeeze(faces[i].image) : null
+      tensor: tensor2
     });
-    tf20.dispose(faces[i].image);
-    if (faces[i].image)
-      delete faces[i].image;
     parent.analyze("End Face");
   }
   parent.analyze("End FaceMesh:");
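Taken together, the detectFace hunks above converge on one pattern: optionally take a squeezed copy of the cropped face for the caller, then always dispose and delete the internal tensor before pushing the result. A condensed restatement of that flow, using the names from the diff (the surrounding loop and result object are abbreviated, not copied verbatim):

const tensor2 = parent.config.face.detector.return ? tf20.squeeze(faces[i].tensor) : null; // copy only if requested
tf20.dispose(faces[i].tensor);               // release the internal tensor in every case
if (faces[i].tensor) delete faces[i].tensor; // remove the dangling reference from the result
faceRes.push({ ...faces[i], id: i, rotation, tensor: tensor2 });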
File diff suppressed because one or more lines are too long
@@ -59,15 +59,16 @@
     "@tensorflow/tfjs-backend-cpu": "^3.8.0",
     "@tensorflow/tfjs-backend-wasm": "^3.8.0",
     "@tensorflow/tfjs-backend-webgl": "^3.8.0",
+    "@tensorflow/tfjs-backend-webgpu": "^0.0.1-alpha.7",
     "@tensorflow/tfjs-converter": "^3.8.0",
     "@tensorflow/tfjs-core": "^3.8.0",
     "@tensorflow/tfjs-data": "^3.8.0",
     "@tensorflow/tfjs-layers": "^3.8.0",
     "@tensorflow/tfjs-node": "^3.8.0",
     "@tensorflow/tfjs-node-gpu": "^3.8.0",
-    "@types/node": "^16.4.13",
-    "@typescript-eslint/eslint-plugin": "^4.29.0",
-    "@typescript-eslint/parser": "^4.29.0",
+    "@types/node": "^16.4.14",
+    "@typescript-eslint/eslint-plugin": "^4.29.1",
+    "@typescript-eslint/parser": "^4.29.1",
     "@vladmandic/pilogger": "^0.2.18",
     "canvas": "^2.8.0",
     "chokidar": "^3.5.2",
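The package.json hunk adds the alpha WebGPU backend as a development dependency alongside the existing 3.8.0 TFJS packages and bumps three tooling versions. As a hedged sketch (not taken from this repository), registering and activating such a backend in TensorFlow.js typically looks like the following, assuming the alpha package registers itself under the 'webgpu' name:

import * as tf from '@tensorflow/tfjs-core';
import '@tensorflow/tfjs-backend-webgpu'; // side-effect import registers the backend factory

async function initBackend() {
  await tf.setBackend('webgpu'); // request the WebGPU backend
  await tf.ready();              // wait for backend initialization
  console.log('active backend:', tf.getBackend());
}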
@@ -1,22 +1,22 @@
-2021-08-09 08:50:50 INFO: @vladmandic/human version 2.1.2
+2021-08-11 18:57:35 INFO: @vladmandic/human version 2.1.2
-2021-08-09 08:50:50 INFO: User: vlado Platform: linux Arch: x64 Node: v16.5.0
+2021-08-11 18:57:35 INFO: User: vlado Platform: linux Arch: x64 Node: v16.5.0
-2021-08-09 08:50:50 INFO: Toolchain: {"tfjs":"3.8.0","esbuild":"0.12.19","typescript":"4.3.5","typedoc":"0.21.5","eslint":"7.32.0"}
+2021-08-11 18:57:35 INFO: Toolchain: {"tfjs":"3.8.0","esbuild":"0.12.19","typescript":"4.3.5","typedoc":"0.21.5","eslint":"7.32.0"}
-2021-08-09 08:50:50 INFO: Clean: ["dist/*","types/*","typedoc/*"]
+2021-08-11 18:57:35 INFO: Clean: ["dist/*","types/*","typedoc/*"]
-2021-08-09 08:50:50 INFO: Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
+2021-08-11 18:57:35 INFO: Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
-2021-08-09 08:50:50 STATE: target: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1303,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-11 18:57:35 STATE: target: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1303,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-09 08:50:50 STATE: target: node type: node: {"imports":42,"importBytes":435865,"outputBytes":377825,"outputFiles":"dist/human.node.js"}
+2021-08-11 18:57:35 STATE: target: node type: node: {"imports":42,"importBytes":436067,"outputBytes":377952,"outputFiles":"dist/human.node.js"}
-2021-08-09 08:50:50 STATE: target: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1311,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-11 18:57:35 STATE: target: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1311,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-09 08:50:51 STATE: target: nodeGPU type: node: {"imports":42,"importBytes":435873,"outputBytes":377829,"outputFiles":"dist/human.node-gpu.js"}
+2021-08-11 18:57:35 STATE: target: nodeGPU type: node: {"imports":42,"importBytes":436075,"outputBytes":377956,"outputFiles":"dist/human.node-gpu.js"}
-2021-08-09 08:50:51 STATE: target: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1378,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-11 18:57:35 STATE: target: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1378,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-09 08:50:51 STATE: target: nodeWASM type: node: {"imports":42,"importBytes":435940,"outputBytes":377901,"outputFiles":"dist/human.node-wasm.js"}
+2021-08-11 18:57:35 STATE: target: nodeWASM type: node: {"imports":42,"importBytes":436142,"outputBytes":378028,"outputFiles":"dist/human.node-wasm.js"}
-2021-08-09 08:50:51 STATE: target: browserNoBundle type: tfjs: {"imports":1,"importBytes":2111,"outputBytes":1242,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-11 18:57:35 STATE: target: browserNoBundle type: tfjs: {"imports":1,"importBytes":2168,"outputBytes":1242,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-09 08:50:51 STATE: target: browserNoBundle type: esm: {"imports":42,"importBytes":435804,"outputBytes":247953,"outputFiles":"dist/human.esm-nobundle.js"}
+2021-08-11 18:57:35 STATE: target: browserNoBundle type: esm: {"imports":42,"importBytes":436006,"outputBytes":248005,"outputFiles":"dist/human.esm-nobundle.js"}
-2021-08-09 08:50:51 STATE: target: browserBundle type: tfjs: {"modules":1170,"moduleBytes":4145868,"imports":7,"importBytes":2111,"outputBytes":2334701,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-11 18:57:36 STATE: target: browserBundle type: tfjs: {"modules":1170,"moduleBytes":4145868,"imports":7,"importBytes":2168,"outputBytes":2334701,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-09 08:50:51 STATE: target: browserBundle type: iife: {"imports":42,"importBytes":2769263,"outputBytes":1378416,"outputFiles":"dist/human.js"}
+2021-08-11 18:57:36 STATE: target: browserBundle type: iife: {"imports":42,"importBytes":2769465,"outputBytes":1378447,"outputFiles":"dist/human.js"}
-2021-08-09 08:50:52 STATE: target: browserBundle type: esm: {"imports":42,"importBytes":2769263,"outputBytes":1378408,"outputFiles":"dist/human.esm.js"}
+2021-08-11 18:57:36 STATE: target: browserBundle type: esm: {"imports":42,"importBytes":2769465,"outputBytes":1378439,"outputFiles":"dist/human.esm.js"}
-2021-08-09 08:50:52 INFO: Running Linter: ["server/","src/","tfjs/","test/","demo/"]
+2021-08-11 18:57:36 INFO: Running Linter: ["server/","src/","tfjs/","test/","demo/"]
-2021-08-09 08:51:14 INFO: Linter complete: files: 75 errors: 0 warnings: 0
+2021-08-11 18:57:59 INFO: Linter complete: files: 75 errors: 0 warnings: 0
-2021-08-09 08:51:14 INFO: Generate ChangeLog: ["/home/vlado/dev/human/CHANGELOG.md"]
+2021-08-11 18:57:59 INFO: Generate ChangeLog: ["/home/vlado/dev/human/CHANGELOG.md"]
-2021-08-09 08:51:14 INFO: Generate Typings: ["src/human.ts"] outDir: ["types"]
+2021-08-11 18:57:59 INFO: Generate Typings: ["src/human.ts"] outDir: ["types"]
-2021-08-09 08:51:28 INFO: Generate TypeDocs: ["src/human.ts"] outDir: ["typedoc"]
+2021-08-11 18:58:13 INFO: Generate TypeDocs: ["src/human.ts"] outDir: ["typedoc"]
-2021-08-09 08:51:42 INFO: Documentation generated at /home/vlado/dev/human/typedoc 1
+2021-08-11 18:58:27 INFO: Documentation generated at /home/vlado/dev/human/typedoc 1
@@ -64,7 +64,7 @@ export class BlazeFaceModel {
     this.config = mergeDeep(this.config, userConfig) as Config;

     const nmsTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.face.detector.maxDetected, this.config.face.detector.iouThreshold, this.config.face.detector.minConfidence);
-    const nms = nmsTensor.arraySync();
+    const nms = await nmsTensor.array();
     tf.dispose(nmsTensor);
     const annotatedBoxes: Array<{ box: { startPoint: Tensor, endPoint: Tensor }, landmarks: Tensor, anchor: number[], confidence: number }> = [];
     for (let i = 0; i < nms.length; i++) {
@@ -51,7 +51,6 @@ export async function predict(input: Tensor, config: Config): Promise<Face[]> {
       mesh: prediction.mesh,
       meshRaw,
       annotations,
-      image: prediction.image,
       tensor: prediction.image,
     });
     if (prediction.coords) tf.dispose(prediction.coords);
@@ -168,7 +168,10 @@ export class Pipeline {
       this.storedBoxes = [];
       this.detectedFaces = 0;
       for (const possible of detector.boxes) {
-        this.storedBoxes.push({ startPoint: possible.box.startPoint.dataSync(), endPoint: possible.box.endPoint.dataSync(), landmarks: possible.landmarks.arraySync(), confidence: possible.confidence });
+        const startPoint = await possible.box.startPoint.data();
+        const endPoint = await possible.box.endPoint.data();
+        const landmarks = await possible.landmarks.array();
+        this.storedBoxes.push({ startPoint, endPoint, landmarks, confidence: possible.confidence });
       }
       if (this.storedBoxes.length > 0) useFreshBox = true;
     }
src/face.ts (28 changed lines)
@@ -163,8 +163,8 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):

     // is something went wrong, skip the face
     // @ts-ignore possibly undefined
-    if (!faces[i].image || faces[i].image['isDisposedInternal']) {
-      log('Face object is disposed:', faces[i].image);
+    if (!faces[i].tensor || faces[i].tensor['isDisposedInternal']) {
+      log('Face object is disposed:', faces[i].tensor);
       continue;
     }

@@ -173,11 +173,11 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
     // run emotion, inherits face from blazeface
     parent.analyze('Start Emotion:');
     if (parent.config.async) {
-      emotionRes = parent.config.face.emotion.enabled ? emotion.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : {};
+      emotionRes = parent.config.face.emotion.enabled ? emotion.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
     } else {
       parent.state = 'run:emotion';
       timeStamp = now();
-      emotionRes = parent.config.face.emotion.enabled ? await emotion.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : {};
+      emotionRes = parent.config.face.emotion.enabled ? await emotion.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
       parent.performance.emotion = Math.trunc(now() - timeStamp);
     }
     parent.analyze('End Emotion:');
@@ -186,11 +186,11 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
 /*
 parent.analyze('Start GEAR:');
 if (parent.config.async) {
-  gearRes = parent.config.face.agegenderrace.enabled ? agegenderrace.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : {};
+  gearRes = parent.config.face.agegenderrace.enabled ? agegenderrace.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
 } else {
 parent.state = 'run:gear';
 timeStamp = now();
-  gearRes = parent.config.face.agegenderrace.enabled ? await agegenderrace.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : {};
+  gearRes = parent.config.face.agegenderrace.enabled ? await agegenderrace.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
 parent.performance.emotion = Math.trunc(now() - timeStamp);
 }
 parent.analyze('End GEAR:');
@@ -199,11 +199,11 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
 // run emotion, inherits face from blazeface
 parent.analyze('Start Description:');
 if (parent.config.async) {
-  descRes = parent.config.face.description.enabled ? faceres.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : [];
+  descRes = parent.config.face.description.enabled ? faceres.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : [];
 } else {
 parent.state = 'run:description';
 timeStamp = now();
-  descRes = parent.config.face.description.enabled ? await faceres.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : [];
+  descRes = parent.config.face.description.enabled ? await faceres.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : [];
 parent.performance.embedding = Math.trunc(now() - timeStamp);
 }
 parent.analyze('End Description:');
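
In the non-async branches above each face module is awaited individually and its elapsed time recorded in the performance map. A small sketch of that timing wrapper, with generic names (timedPredict, predictFn, perf) that are illustrative and not part of the library API:

async function timedPredict<T>(predictFn: () => Promise<T>, perf: Record<string, number>, key: string): Promise<T> {
  const t0 = performance.now();   // high-resolution timer available in browsers and modern Node
  const res = await predictFn();  // sequential path: wait for this module before the next one
  perf[key] = Math.trunc(performance.now() - t0);
  return res;
}
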
@@ -226,6 +226,12 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
 ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2]
 : 0;

+// optionally return tensor
+const tensor = parent.config.face.detector.return ? tf.squeeze(faces[i].tensor) : null;
+// dispose original face tensor
+tf.dispose(faces[i].tensor);
+// delete temp face image
+if (faces[i].tensor) delete faces[i].tensor;
 // combine results
 faceRes.push({
 ...faces[i],
@@ -237,12 +243,8 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
 emotion: emotionRes,
 iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,
 rotation,
-tensor: parent.config.face.detector.return ? tf.squeeze(faces[i].image) : null,
+tensor,
 });
-// dispose original face tensor
-tf.dispose(faces[i].image);
-// delete temp face image
-if (faces[i].image) delete faces[i].image;

 parent.analyze('End Face');
 }
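
Taken together, the two hunks above move the optional tensor copy ahead of disposal: the face crop is squeezed into a fresh tensor first, the temporary is released, and only then is the combined result pushed. A sketch of that ordering in isolation, assuming a face object with a temporary tensor field and a boolean return flag:

import * as tf from '@tensorflow/tfjs';

function extractTensor(face: { tensor?: tf.Tensor }, returnTensor: boolean): tf.Tensor | null {
  // squeeze() produces a new tensor, so the temporary crop can be freed safely afterwards
  const out = returnTensor && face.tensor ? tf.squeeze(face.tensor) : null;
  if (face.tensor) tf.dispose(face.tensor);  // release the temporary crop
  delete face.tensor;                        // drop the reference from the result object
  return out;
}
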
@@ -50,7 +50,7 @@ export class HandDetector {
 const boxes = this.normalizeBoxes(rawBoxes);
 tf.dispose(rawBoxes);
 const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.hand.maxDetected, config.hand.iouThreshold, config.hand.minConfidence);
-const filtered = filteredT.arraySync();
+const filtered = await filteredT.array();

 tf.dispose(scoresT);
 tf.dispose(filteredT);
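
The filtering step above runs asynchronous non-max suppression over the normalized boxes and now reads the surviving indices with an awaited array(). A self-contained sketch of that call, with placeholder threshold values standing in for the config.hand fields:

import * as tf from '@tensorflow/tfjs';

async function filterHands(boxes: tf.Tensor2D, scores: tf.Tensor1D): Promise<number[]> {
  const maxDetected = 2;      // placeholder for config.hand.maxDetected
  const iouThreshold = 0.2;   // placeholder for config.hand.iouThreshold
  const minConfidence = 0.5;  // placeholder for config.hand.minConfidence
  const keptT = await tf.image.nonMaxSuppressionAsync(boxes, scores, maxDetected, iouThreshold, minConfidence);
  const kept = await keptT.array();  // indices of boxes that survived suppression
  tf.dispose(keptT);
  return kept;
}
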
@@ -81,7 +81,7 @@ export class HandDetector {
 const boxes = prediction.box.dataSync();
 const startPoint = boxes.slice(0, 2);
 const endPoint = boxes.slice(2, 4);
-const palmLandmarks = prediction.palmLandmarks.arraySync();
+const palmLandmarks = await prediction.palmLandmarks.array();
 tf.dispose(prediction.box);
 tf.dispose(prediction.palmLandmarks);
 hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks, confidence: prediction.confidence }, [inputWidth / this.inputSize, inputHeight / this.inputSize]));
@@ -122,7 +122,7 @@ export class HandPipeline {
 tf.dispose(confidenceT);
 if (confidence >= config.hand.minConfidence) {
 const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
-const rawCoords = keypointsReshaped.arraySync();
+const rawCoords = await keypointsReshaped.array();
 tf.dispose(keypoints);
 tf.dispose(keypointsReshaped);
 const coords = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);
@@ -50,7 +50,7 @@ export async function predict(image: Tensor, config: Config): Promise<Body[]> {

 if (resT) {
 keypoints.length = 0;
-const res = resT.arraySync();
+const res = await resT.array();
 tf.dispose(resT);
 const kpt = res[0][0];
 for (let id = 0; id < kpt.length; id++) {
@@ -28,15 +28,15 @@ export async function load(config: Config): Promise<GraphModel> {
 async function process(res: Tensor, inputSize, outputShape, config: Config) {
 if (!res) return [];
 const results: Array<Item> = [];
-const detections = res.arraySync();
+const detections = await res.array();
 const squeezeT = tf.squeeze(res);
 tf.dispose(res);
 const arr = tf.split(squeezeT, 6, 1); // x1, y1, x2, y2, score, class
 tf.dispose(squeezeT);
 const stackT = tf.stack([arr[1], arr[0], arr[3], arr[2]], 1); // reorder dims as tf.nms expects y, x
-const boxesT = stackT.squeeze();
-const scoresT = arr[4].squeeze();
-const classesT = arr[5].squeeze();
+const boxesT = tf.squeeze(stackT);
+const scoresT = tf.squeeze(arr[4]);
+const classesT = tf.squeeze(arr[5]);
 arr.forEach((t) => tf.dispose(t));
 const nmsT = await tf.image.nonMaxSuppressionAsync(boxesT, scoresT, config.object.maxDetected, config.object.iouThreshold, config.object.minConfidence);
 tf.dispose(boxesT);
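
The centernet post-processing above splits the raw output into x1, y1, x2, y2, score and class columns, restacks them because tf.image non-max suppression expects [y1, x1, y2, x2], and now uses the functional tf.squeeze() form so every intermediate tensor stays visible for disposal. A reduced sketch of just that reorder step, assuming a [n, 6] detections tensor:

import * as tf from '@tensorflow/tfjs';

function reorderForNms(detections: tf.Tensor2D) {  // assumed shape [n, 6]
  const cols = tf.split(detections, 6, 1);                           // x1, y1, x2, y2, score, class
  const stacked = tf.stack([cols[1], cols[0], cols[3], cols[2]], 1); // y1, x1, y2, x2 as [n, 4, 1]
  const boxes = tf.squeeze(stacked);                                 // [n, 4] for nonMaxSuppressionAsync
  const scores = tf.squeeze(cols[4]);                                // [n]
  cols.forEach((t) => tf.dispose(t));
  tf.dispose(stacked);
  return { boxes, scores };
}
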
@@ -32,14 +32,14 @@ async function process(res, inputSize, outputShape, config) {
 let results: Array<Item> = [];
 for (const strideSize of [1, 2, 4]) { // try each stride size as it detects large/medium/small objects
 // find scores, boxes, classes
-tf.tidy(() => { // wrap in tidy to automatically deallocate temp tensors
+tf.tidy(async () => { // wrap in tidy to automatically deallocate temp tensors
 const baseSize = strideSize * 13; // 13x13=169, 26x26=676, 52x52=2704
 // find boxes and scores output depending on stride
 const scoresT = res.find((a) => (a.shape[1] === (baseSize ** 2) && a.shape[2] === labels.length))?.squeeze();
 const featuresT = res.find((a) => (a.shape[1] === (baseSize ** 2) && a.shape[2] < labels.length))?.squeeze();
 const boxesMax = featuresT.reshape([-1, 4, featuresT.shape[1] / 4]); // reshape [output] to [4, output / 4] where number is number of different features inside each stride
-const boxIdx = boxesMax.argMax(2).arraySync(); // what we need is indexes of features with highest scores, not values itself
-const scores = scoresT.arraySync(); // optionally use exponential scores or just as-is
+const boxIdx = await boxesMax.argMax(2).array(); // what we need is indexes of features with highest scores, not values itself
+const scores = await scoresT.array(); // optionally use exponential scores or just as-is
 for (let i = 0; i < scoresT.shape[0]; i++) { // total strides (x * y matrix)
 for (let j = 0; j < scoresT.shape[1]; j++) { // one score for each class
 const score = scores[i][j]; // get score for current position
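
The comment on baseSize above encodes the stride-to-grid arithmetic for the three detection scales; spelled out, each stride multiplies a 13-cell base, so the flattened grids hold 169, 676 and 2704 cells respectively:

for (const strideSize of [1, 2, 4]) {   // large, medium, small objects
  const baseSize = strideSize * 13;     // 13, 26, 52 cells per side
  console.log(`stride ${strideSize}: ${baseSize}x${baseSize} = ${baseSize ** 2} grid cells`);
}
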
@@ -53,8 +53,7 @@ export interface Face {
 matrix: [number, number, number, number, number, number, number, number, number],
 gaze: { bearing: number, strength: number },
 }
-image?: Tensor;
-tensor: Tensor,
+tensor?: Tensor,
 }

 /** Body results
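
With the interface change above the face result carries a single optional tensor field instead of the old image plus mandatory tensor pair, so callers must guard before using or disposing it. A minimal consumer-side sketch, with FaceLike as a stand-in for the full Face interface:

import * as tf from '@tensorflow/tfjs';

interface FaceLike { id: number; tensor?: tf.Tensor }

function releaseFace(face: FaceLike): void {
  // tensor is only populated when the detector is configured to return it
  if (face.tensor) tf.dispose(face.tensor);
}
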
@@ -3,7 +3,6 @@
 * @external
 */

-// import from src
 // get versions of all packages
 import { version as tfjsVersion } from '@tensorflow/tfjs/package.json';
 import { version as tfjsCoreVersion } from '@tensorflow/tfjs-core/package.json';
@@ -14,7 +13,7 @@ import { version as tfjsBackendCPUVersion } from '@tensorflow/tfjs-backend-cpu/p
 import { version as tfjsBackendWebGLVersion } from '@tensorflow/tfjs-backend-webgl/package.json';
 import { version as tfjsBackendWASMVersion } from '@tensorflow/tfjs-backend-wasm/package.json';

-// export all
+// export all from sources
 // requires treeShaking:ignore-annotations due to tfjs misconfiguration
 /*
 export * from '@tensorflow/tfjs-core/src/index';
@@ -26,7 +25,7 @@ export * from '@tensorflow/tfjs-backend-webgl/src/index';
 export * from '@tensorflow/tfjs-backend-wasm/src/index';
 */

-// export all
+// export all from build
 export * from '@tensorflow/tfjs-core/dist/index.js';
 export * from '@tensorflow/tfjs-layers/dist/index.js';
 export * from '@tensorflow/tfjs-converter/dist/index.js';
@@ -34,6 +33,7 @@ export * as data from '@tensorflow/tfjs-data/dist/index.js';
 export * from '@tensorflow/tfjs-backend-cpu/dist/index.js';
 export * from '@tensorflow/tfjs-backend-webgl/dist/index.js';
 export * from '@tensorflow/tfjs-backend-wasm/dist/index.js';
+// export * from '@tensorflow/tfjs-backend-webgpu/dist/index.js'; // experimental

 // export versions
 export const version = {
@@ -46,4 +46,3 @@ export const version = {
 'tfjs-backend-webgl': tfjsBackendWebGLVersion,
 'tfjs-backend-wasm': tfjsBackendWASMVersion,
 };
-// export const version = {};
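
The version map assembled above re-exports the package version of each bundled tfjs module. A trimmed sketch of the same pattern, keeping only two of the imports for brevity and assuming resolveJsonModule is enabled; the key names and the console.log are illustrative:

import { version as tfjsVersion } from '@tensorflow/tfjs/package.json';
import { version as tfjsCoreVersion } from '@tensorflow/tfjs-core/package.json';

export const version = {
  tfjs: tfjsVersion,
  'tfjs-core': tfjsCoreVersion,
};

console.log('bundled tfjs versions:', version);
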

File diff suppressed because one or more lines are too long

@@ -122,7 +122,6 @@
 <li class="tsd-kind-property tsd-parent-kind-interface"><a href="Face.html#gender" class="tsd-kind-icon">gender</a></li>
 <li class="tsd-kind-property tsd-parent-kind-interface"><a href="Face.html#genderScore" class="tsd-kind-icon">gender<wbr>Score</a></li>
 <li class="tsd-kind-property tsd-parent-kind-interface"><a href="Face.html#id" class="tsd-kind-icon">id</a></li>
-<li class="tsd-kind-property tsd-parent-kind-interface"><a href="Face.html#image" class="tsd-kind-icon">image</a></li>
 <li class="tsd-kind-property tsd-parent-kind-interface"><a href="Face.html#iris" class="tsd-kind-icon">iris</a></li>
 <li class="tsd-kind-property tsd-parent-kind-interface"><a href="Face.html#mesh" class="tsd-kind-icon">mesh</a></li>
 <li class="tsd-kind-property tsd-parent-kind-interface"><a href="Face.html#meshRaw" class="tsd-kind-icon">mesh<wbr>Raw</a></li>
@@ -213,13 +212,6 @@
 <aside class="tsd-sources">
 </aside>
 </section>
-<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
-<a name="image" class="tsd-anchor"></a>
-<h3><span class="tsd-flag ts-flagOptional">Optional</span> image</h3>
-<div class="tsd-signature tsd-kind-icon">image<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span></div>
-<aside class="tsd-sources">
-</aside>
-</section>
 <section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
 <a name="iris" class="tsd-anchor"></a>
 <h3><span class="tsd-flag ts-flagOptional">Optional</span> iris</h3>
@@ -290,7 +282,7 @@
 </section>
 <section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
 <a name="tensor" class="tsd-anchor"></a>
-<h3>tensor</h3>
+<h3><span class="tsd-flag ts-flagOptional">Optional</span> tensor</h3>
 <div class="tsd-signature tsd-kind-icon">tensor<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span></div>
 <aside class="tsd-sources">
 </aside>
@@ -360,9 +352,6 @@
 <li class=" tsd-kind-property tsd-parent-kind-interface">
 <a href="Face.html#id" class="tsd-kind-icon">id</a>
 </li>
-<li class=" tsd-kind-property tsd-parent-kind-interface">
-<a href="Face.html#image" class="tsd-kind-icon">image</a>
-</li>
 <li class=" tsd-kind-property tsd-parent-kind-interface">
 <a href="Face.html#iris" class="tsd-kind-icon">iris</a>
 </li>
@@ -61,8 +61,7 @@ export interface Face {
 strength: number;
 };
 };
-image?: Tensor;
-tensor: Tensor;
+tensor?: Tensor;
 }
 /** Body results
 *

wiki | 2 changes

@@ -1 +1 @@
-Subproject commit 2135debf198b5b0ecb670896bef837cbb45fe32e
+Subproject commit bdc4077a3df07abdf4a2d5b2d2beadf2e573e8d8