fix centernet & update blazeface

pull/193/head
Vladimir Mandic 2021-08-11 18:59:02 -04:00
parent d1ec2d9f14
commit 3adbbe152b
28 changed files with 243 additions and 237 deletions

View File

@ -9,7 +9,7 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
## Changelog
### **HEAD -> main** 2021/08/06 mandic00@live.com
### **HEAD -> main** 2021/08/09 mandic00@live.com
- minor update
- replace movenet with lightning-v4

View File

@ -40,15 +40,15 @@ let userConfig = {
enabled: false,
flip: false,
},
face: { enabled: false,
detector: { return: true },
face: { enabled: true,
detector: { return: false },
mesh: { enabled: true },
iris: { enabled: false },
description: { enabled: false },
emotion: { enabled: false },
},
object: { enabled: false },
gesture: { enabled: true },
gesture: { enabled: false },
hand: { enabled: false },
body: { enabled: false },
// body: { enabled: true, modelPath: 'posenet.json' },
@ -164,7 +164,8 @@ function status(msg) {
div.innerText = msg;
} else {
const video = document.getElementById('video');
document.getElementById('play').style.display = (video.srcObject !== null) && !video.paused ? 'none' : 'block';
const playing = (video.srcObject !== null) && !video.paused;
document.getElementById('play').style.display = playing ? 'none' : 'block';
document.getElementById('loader').style.display = 'none';
div.innerText = '';
}
@ -259,9 +260,10 @@ async function drawResults(input) {
const avgDraw = ui.drawFPS.length > 0 ? Math.trunc(10 * ui.drawFPS.reduce((a, b) => a + b, 0) / ui.drawFPS.length) / 10 : 0;
const warning = (ui.detectFPS.length > 5) && (avgDetect < 2) ? '<font color="lightcoral">warning: your performance is low: try switching to higher performance backend, lowering resolution or disabling some models</font>' : '';
const fps = avgDetect > 0 ? `FPS process:${avgDetect} refresh:${avgDraw}` : '';
const backend = engine.state.numTensors > 0 ? `backend: ${human.tf.getBackend()} | ${memory}` : 'running in web worker';
document.getElementById('log').innerHTML = `
video: ${ui.camera.name} | facing: ${ui.camera.facing} | screen: ${window.innerWidth} x ${window.innerHeight} camera: ${ui.camera.width} x ${ui.camera.height} ${processing}<br>
backend: ${human.tf.getBackend()} | ${memory}<br>
backend: ${backend}<br>
performance: ${str(lastDetectedResult.performance)}ms ${fps}<br>
${warning}<br>
`;
@ -363,6 +365,7 @@ async function setupCamera() {
// eslint-disable-next-line no-use-before-define
if (live && !ui.detectThread) runHumanDetect(video, canvas);
ui.busy = false;
status();
resolve();
};
});
@ -597,6 +600,7 @@ async function detectVideo() {
document.getElementById('btnStartText').innerHTML = 'pause video';
await video.play();
runHumanDetect(video, canvas);
status();
} else {
status(cameraError);
}
@ -878,6 +882,7 @@ async function pwaRegister() {
}
async function main() {
/*
window.addEventListener('unhandledrejection', (evt) => {
// eslint-disable-next-line no-console
console.error(evt.reason || evt);
@ -885,6 +890,7 @@ async function main() {
status('exception error');
evt.preventDefault();
});
*/
log('demo starting ...');

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

76
dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

76
dist/human.js vendored

File diff suppressed because one or more lines are too long

View File

@ -517,7 +517,7 @@ var BlazeFaceModel = class {
});
this.config = mergeDeep(this.config, userConfig);
const nmsTensor = await tf3.image.nonMaxSuppressionAsync(boxes, scores, this.config.face.detector.maxDetected, this.config.face.detector.iouThreshold, this.config.face.detector.minConfidence);
const nms = nmsTensor.arraySync();
const nms = await nmsTensor.array();
tf3.dispose(nmsTensor);
const annotatedBoxes = [];
for (let i = 0; i < nms.length; i++) {
@ -3955,7 +3955,10 @@ var Pipeline = class {
this.storedBoxes = [];
this.detectedFaces = 0;
for (const possible of detector.boxes) {
this.storedBoxes.push({ startPoint: possible.box.startPoint.dataSync(), endPoint: possible.box.endPoint.dataSync(), landmarks: possible.landmarks.arraySync(), confidence: possible.confidence });
const startPoint = await possible.box.startPoint.data();
const endPoint = await possible.box.endPoint.data();
const landmarks = await possible.landmarks.array();
this.storedBoxes.push({ startPoint, endPoint, landmarks, confidence: possible.confidence });
}
if (this.storedBoxes.length > 0)
useFreshBox = true;
@ -4118,7 +4121,6 @@ async function predict(input, config3) {
mesh: prediction.mesh,
meshRaw,
annotations: annotations3,
image: prediction.image,
tensor: prediction.image
});
if (prediction.coords)
@ -7707,7 +7709,7 @@ var HandDetector = class {
const boxes = this.normalizeBoxes(rawBoxes);
tf10.dispose(rawBoxes);
const filteredT = await tf10.image.nonMaxSuppressionAsync(boxes, scores, config3.hand.maxDetected, config3.hand.iouThreshold, config3.hand.minConfidence);
const filtered = filteredT.arraySync();
const filtered = await filteredT.array();
tf10.dispose(scoresT);
tf10.dispose(filteredT);
const hands = [];
@ -7737,7 +7739,7 @@ var HandDetector = class {
const boxes = prediction.box.dataSync();
const startPoint = boxes.slice(0, 2);
const endPoint = boxes.slice(2, 4);
const palmLandmarks = prediction.palmLandmarks.arraySync();
const palmLandmarks = await prediction.palmLandmarks.array();
tf10.dispose(prediction.box);
tf10.dispose(prediction.palmLandmarks);
hands.push(scaleBoxCoordinates2({ startPoint, endPoint, palmLandmarks, confidence: prediction.confidence }, [inputWidth / this.inputSize, inputHeight / this.inputSize]));
@ -7911,7 +7913,7 @@ var HandPipeline = class {
tf11.dispose(confidenceT);
if (confidence >= config3.hand.minConfidence) {
const keypointsReshaped = tf11.reshape(keypoints3, [-1, 3]);
const rawCoords = keypointsReshaped.arraySync();
const rawCoords = await keypointsReshaped.array();
tf11.dispose(keypoints3);
tf11.dispose(keypointsReshaped);
const coords3 = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);
@ -8310,7 +8312,7 @@ async function predict8(image18, config3) {
tf15.dispose(tensor2);
if (resT) {
keypoints2.length = 0;
const res = resT.arraySync();
const res = await resT.array();
tf15.dispose(resT);
const kpt3 = res[0][0];
for (let id = 0; id < kpt3.length; id++) {
@ -8463,14 +8465,14 @@ async function process2(res, inputSize, outputShape, config3) {
let id = 0;
let results = [];
for (const strideSize of [1, 2, 4]) {
tf16.tidy(() => {
tf16.tidy(async () => {
var _a, _b;
const baseSize = strideSize * 13;
const scoresT = (_a = res.find((a) => a.shape[1] === baseSize ** 2 && a.shape[2] === labels.length)) == null ? void 0 : _a.squeeze();
const featuresT = (_b = res.find((a) => a.shape[1] === baseSize ** 2 && a.shape[2] < labels.length)) == null ? void 0 : _b.squeeze();
const boxesMax = featuresT.reshape([-1, 4, featuresT.shape[1] / 4]);
const boxIdx = boxesMax.argMax(2).arraySync();
const scores = scoresT.arraySync();
const boxIdx = await boxesMax.argMax(2).array();
const scores = await scoresT.array();
for (let i = 0; i < scoresT.shape[0]; i++) {
for (let j = 0; j < scoresT.shape[1]; j++) {
const score3 = scores[i][j];
@ -8567,15 +8569,15 @@ async function process3(res, inputSize, outputShape, config3) {
if (!res)
return [];
const results = [];
const detections = res.arraySync();
const detections = await res.array();
const squeezeT = tf17.squeeze(res);
tf17.dispose(res);
const arr = tf17.split(squeezeT, 6, 1);
tf17.dispose(squeezeT);
const stackT = tf17.stack([arr[1], arr[0], arr[3], arr[2]], 1);
const boxesT = stackT.squeeze();
const scoresT = arr[4].squeeze();
const classesT = arr[5].squeeze();
const boxesT = tf17.squeeze(stackT);
const scoresT = tf17.squeeze(arr[4]);
const classesT = tf17.squeeze(arr[5]);
arr.forEach((t) => tf17.dispose(t));
const nmsT = await tf17.image.nonMaxSuppressionAsync(boxesT, scoresT, config3.object.maxDetected, config3.object.iouThreshold, config3.object.minConfidence);
tf17.dispose(boxesT);
@ -9765,28 +9767,28 @@ var detectFace = async (parent, input) => {
return [];
for (let i = 0; i < faces.length; i++) {
parent.analyze("Get Face");
if (!faces[i].image || faces[i].image["isDisposedInternal"]) {
log("Face object is disposed:", faces[i].image);
if (!faces[i].tensor || faces[i].tensor["isDisposedInternal"]) {
log("Face object is disposed:", faces[i].tensor);
continue;
}
const rotation = calculateFaceAngle(faces[i], [input.shape[2], input.shape[1]]);
parent.analyze("Start Emotion:");
if (parent.config.async) {
emotionRes = parent.config.face.emotion.enabled ? predict3(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : {};
emotionRes = parent.config.face.emotion.enabled ? predict3(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : {};
} else {
parent.state = "run:emotion";
timeStamp = now();
emotionRes = parent.config.face.emotion.enabled ? await predict3(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : {};
emotionRes = parent.config.face.emotion.enabled ? await predict3(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : {};
parent.performance.emotion = Math.trunc(now() - timeStamp);
}
parent.analyze("End Emotion:");
parent.analyze("Start Description:");
if (parent.config.async) {
descRes = parent.config.face.description.enabled ? predict2(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : [];
descRes = parent.config.face.description.enabled ? predict2(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : [];
} else {
parent.state = "run:description";
timeStamp = now();
descRes = parent.config.face.description.enabled ? await predict2(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : [];
descRes = parent.config.face.description.enabled ? await predict2(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : [];
parent.performance.embedding = Math.trunc(now() - timeStamp);
}
parent.analyze("End Description:");
@ -9799,6 +9801,10 @@ var detectFace = async (parent, input) => {
delete faces[i].annotations.rightEyeIris;
}
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
const tensor2 = parent.config.face.detector.return ? tf20.squeeze(faces[i].tensor) : null;
tf20.dispose(faces[i].tensor);
if (faces[i].tensor)
delete faces[i].tensor;
faceRes.push({
...faces[i],
id: i,
@ -9809,11 +9815,8 @@ var detectFace = async (parent, input) => {
emotion: emotionRes,
iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,
rotation,
tensor: parent.config.face.detector.return ? tf20.squeeze(faces[i].image) : null
tensor: tensor2
});
tf20.dispose(faces[i].image);
if (faces[i].image)
delete faces[i].image;
parent.analyze("End Face");
}
parent.analyze("End FaceMesh:");

View File

@ -518,7 +518,7 @@ var BlazeFaceModel = class {
});
this.config = mergeDeep(this.config, userConfig);
const nmsTensor = await tf3.image.nonMaxSuppressionAsync(boxes, scores, this.config.face.detector.maxDetected, this.config.face.detector.iouThreshold, this.config.face.detector.minConfidence);
const nms = nmsTensor.arraySync();
const nms = await nmsTensor.array();
tf3.dispose(nmsTensor);
const annotatedBoxes = [];
for (let i = 0; i < nms.length; i++) {
@ -3956,7 +3956,10 @@ var Pipeline = class {
this.storedBoxes = [];
this.detectedFaces = 0;
for (const possible of detector.boxes) {
this.storedBoxes.push({ startPoint: possible.box.startPoint.dataSync(), endPoint: possible.box.endPoint.dataSync(), landmarks: possible.landmarks.arraySync(), confidence: possible.confidence });
const startPoint = await possible.box.startPoint.data();
const endPoint = await possible.box.endPoint.data();
const landmarks = await possible.landmarks.array();
this.storedBoxes.push({ startPoint, endPoint, landmarks, confidence: possible.confidence });
}
if (this.storedBoxes.length > 0)
useFreshBox = true;
@ -4119,7 +4122,6 @@ async function predict(input, config3) {
mesh: prediction.mesh,
meshRaw,
annotations: annotations3,
image: prediction.image,
tensor: prediction.image
});
if (prediction.coords)
@ -7708,7 +7710,7 @@ var HandDetector = class {
const boxes = this.normalizeBoxes(rawBoxes);
tf10.dispose(rawBoxes);
const filteredT = await tf10.image.nonMaxSuppressionAsync(boxes, scores, config3.hand.maxDetected, config3.hand.iouThreshold, config3.hand.minConfidence);
const filtered = filteredT.arraySync();
const filtered = await filteredT.array();
tf10.dispose(scoresT);
tf10.dispose(filteredT);
const hands = [];
@ -7738,7 +7740,7 @@ var HandDetector = class {
const boxes = prediction.box.dataSync();
const startPoint = boxes.slice(0, 2);
const endPoint = boxes.slice(2, 4);
const palmLandmarks = prediction.palmLandmarks.arraySync();
const palmLandmarks = await prediction.palmLandmarks.array();
tf10.dispose(prediction.box);
tf10.dispose(prediction.palmLandmarks);
hands.push(scaleBoxCoordinates2({ startPoint, endPoint, palmLandmarks, confidence: prediction.confidence }, [inputWidth / this.inputSize, inputHeight / this.inputSize]));
@ -7912,7 +7914,7 @@ var HandPipeline = class {
tf11.dispose(confidenceT);
if (confidence >= config3.hand.minConfidence) {
const keypointsReshaped = tf11.reshape(keypoints3, [-1, 3]);
const rawCoords = keypointsReshaped.arraySync();
const rawCoords = await keypointsReshaped.array();
tf11.dispose(keypoints3);
tf11.dispose(keypointsReshaped);
const coords3 = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);
@ -8311,7 +8313,7 @@ async function predict8(image18, config3) {
tf15.dispose(tensor2);
if (resT) {
keypoints2.length = 0;
const res = resT.arraySync();
const res = await resT.array();
tf15.dispose(resT);
const kpt3 = res[0][0];
for (let id = 0; id < kpt3.length; id++) {
@ -8464,14 +8466,14 @@ async function process2(res, inputSize, outputShape, config3) {
let id = 0;
let results = [];
for (const strideSize of [1, 2, 4]) {
tf16.tidy(() => {
tf16.tidy(async () => {
var _a, _b;
const baseSize = strideSize * 13;
const scoresT = (_a = res.find((a) => a.shape[1] === baseSize ** 2 && a.shape[2] === labels.length)) == null ? void 0 : _a.squeeze();
const featuresT = (_b = res.find((a) => a.shape[1] === baseSize ** 2 && a.shape[2] < labels.length)) == null ? void 0 : _b.squeeze();
const boxesMax = featuresT.reshape([-1, 4, featuresT.shape[1] / 4]);
const boxIdx = boxesMax.argMax(2).arraySync();
const scores = scoresT.arraySync();
const boxIdx = await boxesMax.argMax(2).array();
const scores = await scoresT.array();
for (let i = 0; i < scoresT.shape[0]; i++) {
for (let j = 0; j < scoresT.shape[1]; j++) {
const score3 = scores[i][j];
@ -8568,15 +8570,15 @@ async function process3(res, inputSize, outputShape, config3) {
if (!res)
return [];
const results = [];
const detections = res.arraySync();
const detections = await res.array();
const squeezeT = tf17.squeeze(res);
tf17.dispose(res);
const arr = tf17.split(squeezeT, 6, 1);
tf17.dispose(squeezeT);
const stackT = tf17.stack([arr[1], arr[0], arr[3], arr[2]], 1);
const boxesT = stackT.squeeze();
const scoresT = arr[4].squeeze();
const classesT = arr[5].squeeze();
const boxesT = tf17.squeeze(stackT);
const scoresT = tf17.squeeze(arr[4]);
const classesT = tf17.squeeze(arr[5]);
arr.forEach((t) => tf17.dispose(t));
const nmsT = await tf17.image.nonMaxSuppressionAsync(boxesT, scoresT, config3.object.maxDetected, config3.object.iouThreshold, config3.object.minConfidence);
tf17.dispose(boxesT);
@ -9766,28 +9768,28 @@ var detectFace = async (parent, input) => {
return [];
for (let i = 0; i < faces.length; i++) {
parent.analyze("Get Face");
if (!faces[i].image || faces[i].image["isDisposedInternal"]) {
log("Face object is disposed:", faces[i].image);
if (!faces[i].tensor || faces[i].tensor["isDisposedInternal"]) {
log("Face object is disposed:", faces[i].tensor);
continue;
}
const rotation = calculateFaceAngle(faces[i], [input.shape[2], input.shape[1]]);
parent.analyze("Start Emotion:");
if (parent.config.async) {
emotionRes = parent.config.face.emotion.enabled ? predict3(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : {};
emotionRes = parent.config.face.emotion.enabled ? predict3(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : {};
} else {
parent.state = "run:emotion";
timeStamp = now();
emotionRes = parent.config.face.emotion.enabled ? await predict3(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : {};
emotionRes = parent.config.face.emotion.enabled ? await predict3(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : {};
parent.performance.emotion = Math.trunc(now() - timeStamp);
}
parent.analyze("End Emotion:");
parent.analyze("Start Description:");
if (parent.config.async) {
descRes = parent.config.face.description.enabled ? predict2(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : [];
descRes = parent.config.face.description.enabled ? predict2(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : [];
} else {
parent.state = "run:description";
timeStamp = now();
descRes = parent.config.face.description.enabled ? await predict2(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : [];
descRes = parent.config.face.description.enabled ? await predict2(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : [];
parent.performance.embedding = Math.trunc(now() - timeStamp);
}
parent.analyze("End Description:");
@ -9800,6 +9802,10 @@ var detectFace = async (parent, input) => {
delete faces[i].annotations.rightEyeIris;
}
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
const tensor2 = parent.config.face.detector.return ? tf20.squeeze(faces[i].tensor) : null;
tf20.dispose(faces[i].tensor);
if (faces[i].tensor)
delete faces[i].tensor;
faceRes.push({
...faces[i],
id: i,
@ -9810,11 +9816,8 @@ var detectFace = async (parent, input) => {
emotion: emotionRes,
iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,
rotation,
tensor: parent.config.face.detector.return ? tf20.squeeze(faces[i].image) : null
tensor: tensor2
});
tf20.dispose(faces[i].image);
if (faces[i].image)
delete faces[i].image;
parent.analyze("End Face");
}
parent.analyze("End FaceMesh:");

51
dist/human.node.js vendored
View File

@ -517,7 +517,7 @@ var BlazeFaceModel = class {
});
this.config = mergeDeep(this.config, userConfig);
const nmsTensor = await tf3.image.nonMaxSuppressionAsync(boxes, scores, this.config.face.detector.maxDetected, this.config.face.detector.iouThreshold, this.config.face.detector.minConfidence);
const nms = nmsTensor.arraySync();
const nms = await nmsTensor.array();
tf3.dispose(nmsTensor);
const annotatedBoxes = [];
for (let i = 0; i < nms.length; i++) {
@ -3955,7 +3955,10 @@ var Pipeline = class {
this.storedBoxes = [];
this.detectedFaces = 0;
for (const possible of detector.boxes) {
this.storedBoxes.push({ startPoint: possible.box.startPoint.dataSync(), endPoint: possible.box.endPoint.dataSync(), landmarks: possible.landmarks.arraySync(), confidence: possible.confidence });
const startPoint = await possible.box.startPoint.data();
const endPoint = await possible.box.endPoint.data();
const landmarks = await possible.landmarks.array();
this.storedBoxes.push({ startPoint, endPoint, landmarks, confidence: possible.confidence });
}
if (this.storedBoxes.length > 0)
useFreshBox = true;
@ -4118,7 +4121,6 @@ async function predict(input, config3) {
mesh: prediction.mesh,
meshRaw,
annotations: annotations3,
image: prediction.image,
tensor: prediction.image
});
if (prediction.coords)
@ -7707,7 +7709,7 @@ var HandDetector = class {
const boxes = this.normalizeBoxes(rawBoxes);
tf10.dispose(rawBoxes);
const filteredT = await tf10.image.nonMaxSuppressionAsync(boxes, scores, config3.hand.maxDetected, config3.hand.iouThreshold, config3.hand.minConfidence);
const filtered = filteredT.arraySync();
const filtered = await filteredT.array();
tf10.dispose(scoresT);
tf10.dispose(filteredT);
const hands = [];
@ -7737,7 +7739,7 @@ var HandDetector = class {
const boxes = prediction.box.dataSync();
const startPoint = boxes.slice(0, 2);
const endPoint = boxes.slice(2, 4);
const palmLandmarks = prediction.palmLandmarks.arraySync();
const palmLandmarks = await prediction.palmLandmarks.array();
tf10.dispose(prediction.box);
tf10.dispose(prediction.palmLandmarks);
hands.push(scaleBoxCoordinates2({ startPoint, endPoint, palmLandmarks, confidence: prediction.confidence }, [inputWidth / this.inputSize, inputHeight / this.inputSize]));
@ -7911,7 +7913,7 @@ var HandPipeline = class {
tf11.dispose(confidenceT);
if (confidence >= config3.hand.minConfidence) {
const keypointsReshaped = tf11.reshape(keypoints3, [-1, 3]);
const rawCoords = keypointsReshaped.arraySync();
const rawCoords = await keypointsReshaped.array();
tf11.dispose(keypoints3);
tf11.dispose(keypointsReshaped);
const coords3 = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);
@ -8310,7 +8312,7 @@ async function predict8(image18, config3) {
tf15.dispose(tensor2);
if (resT) {
keypoints2.length = 0;
const res = resT.arraySync();
const res = await resT.array();
tf15.dispose(resT);
const kpt3 = res[0][0];
for (let id = 0; id < kpt3.length; id++) {
@ -8463,14 +8465,14 @@ async function process2(res, inputSize, outputShape, config3) {
let id = 0;
let results = [];
for (const strideSize of [1, 2, 4]) {
tf16.tidy(() => {
tf16.tidy(async () => {
var _a, _b;
const baseSize = strideSize * 13;
const scoresT = (_a = res.find((a) => a.shape[1] === baseSize ** 2 && a.shape[2] === labels.length)) == null ? void 0 : _a.squeeze();
const featuresT = (_b = res.find((a) => a.shape[1] === baseSize ** 2 && a.shape[2] < labels.length)) == null ? void 0 : _b.squeeze();
const boxesMax = featuresT.reshape([-1, 4, featuresT.shape[1] / 4]);
const boxIdx = boxesMax.argMax(2).arraySync();
const scores = scoresT.arraySync();
const boxIdx = await boxesMax.argMax(2).array();
const scores = await scoresT.array();
for (let i = 0; i < scoresT.shape[0]; i++) {
for (let j = 0; j < scoresT.shape[1]; j++) {
const score3 = scores[i][j];
@ -8567,15 +8569,15 @@ async function process3(res, inputSize, outputShape, config3) {
if (!res)
return [];
const results = [];
const detections = res.arraySync();
const detections = await res.array();
const squeezeT = tf17.squeeze(res);
tf17.dispose(res);
const arr = tf17.split(squeezeT, 6, 1);
tf17.dispose(squeezeT);
const stackT = tf17.stack([arr[1], arr[0], arr[3], arr[2]], 1);
const boxesT = stackT.squeeze();
const scoresT = arr[4].squeeze();
const classesT = arr[5].squeeze();
const boxesT = tf17.squeeze(stackT);
const scoresT = tf17.squeeze(arr[4]);
const classesT = tf17.squeeze(arr[5]);
arr.forEach((t) => tf17.dispose(t));
const nmsT = await tf17.image.nonMaxSuppressionAsync(boxesT, scoresT, config3.object.maxDetected, config3.object.iouThreshold, config3.object.minConfidence);
tf17.dispose(boxesT);
@ -9765,28 +9767,28 @@ var detectFace = async (parent, input) => {
return [];
for (let i = 0; i < faces.length; i++) {
parent.analyze("Get Face");
if (!faces[i].image || faces[i].image["isDisposedInternal"]) {
log("Face object is disposed:", faces[i].image);
if (!faces[i].tensor || faces[i].tensor["isDisposedInternal"]) {
log("Face object is disposed:", faces[i].tensor);
continue;
}
const rotation = calculateFaceAngle(faces[i], [input.shape[2], input.shape[1]]);
parent.analyze("Start Emotion:");
if (parent.config.async) {
emotionRes = parent.config.face.emotion.enabled ? predict3(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : {};
emotionRes = parent.config.face.emotion.enabled ? predict3(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : {};
} else {
parent.state = "run:emotion";
timeStamp = now();
emotionRes = parent.config.face.emotion.enabled ? await predict3(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : {};
emotionRes = parent.config.face.emotion.enabled ? await predict3(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : {};
parent.performance.emotion = Math.trunc(now() - timeStamp);
}
parent.analyze("End Emotion:");
parent.analyze("Start Description:");
if (parent.config.async) {
descRes = parent.config.face.description.enabled ? predict2(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : [];
descRes = parent.config.face.description.enabled ? predict2(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : [];
} else {
parent.state = "run:description";
timeStamp = now();
descRes = parent.config.face.description.enabled ? await predict2(faces[i].image || tf20.tensor([]), parent.config, i, faces.length) : [];
descRes = parent.config.face.description.enabled ? await predict2(faces[i].tensor || tf20.tensor([]), parent.config, i, faces.length) : [];
parent.performance.embedding = Math.trunc(now() - timeStamp);
}
parent.analyze("End Description:");
@ -9799,6 +9801,10 @@ var detectFace = async (parent, input) => {
delete faces[i].annotations.rightEyeIris;
}
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
const tensor2 = parent.config.face.detector.return ? tf20.squeeze(faces[i].tensor) : null;
tf20.dispose(faces[i].tensor);
if (faces[i].tensor)
delete faces[i].tensor;
faceRes.push({
...faces[i],
id: i,
@ -9809,11 +9815,8 @@ var detectFace = async (parent, input) => {
emotion: emotionRes,
iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,
rotation,
tensor: parent.config.face.detector.return ? tf20.squeeze(faces[i].image) : null
tensor: tensor2
});
tf20.dispose(faces[i].image);
if (faces[i].image)
delete faces[i].image;
parent.analyze("End Face");
}
parent.analyze("End FaceMesh:");

File diff suppressed because one or more lines are too long

View File

@ -59,15 +59,16 @@
"@tensorflow/tfjs-backend-cpu": "^3.8.0",
"@tensorflow/tfjs-backend-wasm": "^3.8.0",
"@tensorflow/tfjs-backend-webgl": "^3.8.0",
"@tensorflow/tfjs-backend-webgpu": "^0.0.1-alpha.7",
"@tensorflow/tfjs-converter": "^3.8.0",
"@tensorflow/tfjs-core": "^3.8.0",
"@tensorflow/tfjs-data": "^3.8.0",
"@tensorflow/tfjs-layers": "^3.8.0",
"@tensorflow/tfjs-node": "^3.8.0",
"@tensorflow/tfjs-node-gpu": "^3.8.0",
"@types/node": "^16.4.13",
"@typescript-eslint/eslint-plugin": "^4.29.0",
"@typescript-eslint/parser": "^4.29.0",
"@types/node": "^16.4.14",
"@typescript-eslint/eslint-plugin": "^4.29.1",
"@typescript-eslint/parser": "^4.29.1",
"@vladmandic/pilogger": "^0.2.18",
"canvas": "^2.8.0",
"chokidar": "^3.5.2",

View File

@ -1,22 +1,22 @@
2021-08-09 08:50:50 INFO:  @vladmandic/human version 2.1.2
2021-08-09 08:50:50 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.5.0
2021-08-09 08:50:50 INFO:  Toolchain: {"tfjs":"3.8.0","esbuild":"0.12.19","typescript":"4.3.5","typedoc":"0.21.5","eslint":"7.32.0"}
2021-08-09 08:50:50 INFO:  Clean: ["dist/*","types/*","typedoc/*"]
2021-08-09 08:50:50 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-08-09 08:50:50 STATE: target: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1303,"outputFiles":"dist/tfjs.esm.js"}
2021-08-09 08:50:50 STATE: target: node type: node: {"imports":42,"importBytes":435865,"outputBytes":377825,"outputFiles":"dist/human.node.js"}
2021-08-09 08:50:50 STATE: target: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1311,"outputFiles":"dist/tfjs.esm.js"}
2021-08-09 08:50:51 STATE: target: nodeGPU type: node: {"imports":42,"importBytes":435873,"outputBytes":377829,"outputFiles":"dist/human.node-gpu.js"}
2021-08-09 08:50:51 STATE: target: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1378,"outputFiles":"dist/tfjs.esm.js"}
2021-08-09 08:50:51 STATE: target: nodeWASM type: node: {"imports":42,"importBytes":435940,"outputBytes":377901,"outputFiles":"dist/human.node-wasm.js"}
2021-08-09 08:50:51 STATE: target: browserNoBundle type: tfjs: {"imports":1,"importBytes":2111,"outputBytes":1242,"outputFiles":"dist/tfjs.esm.js"}
2021-08-09 08:50:51 STATE: target: browserNoBundle type: esm: {"imports":42,"importBytes":435804,"outputBytes":247953,"outputFiles":"dist/human.esm-nobundle.js"}
2021-08-09 08:50:51 STATE: target: browserBundle type: tfjs: {"modules":1170,"moduleBytes":4145868,"imports":7,"importBytes":2111,"outputBytes":2334701,"outputFiles":"dist/tfjs.esm.js"}
2021-08-09 08:50:51 STATE: target: browserBundle type: iife: {"imports":42,"importBytes":2769263,"outputBytes":1378416,"outputFiles":"dist/human.js"}
2021-08-09 08:50:52 STATE: target: browserBundle type: esm: {"imports":42,"importBytes":2769263,"outputBytes":1378408,"outputFiles":"dist/human.esm.js"}
2021-08-09 08:50:52 INFO:  Running Linter: ["server/","src/","tfjs/","test/","demo/"]
2021-08-09 08:51:14 INFO:  Linter complete: files: 75 errors: 0 warnings: 0
2021-08-09 08:51:14 INFO:  Generate ChangeLog: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-08-09 08:51:14 INFO:  Generate Typings: ["src/human.ts"] outDir: ["types"]
2021-08-09 08:51:28 INFO:  Generate TypeDocs: ["src/human.ts"] outDir: ["typedoc"]
2021-08-09 08:51:42 INFO:  Documentation generated at /home/vlado/dev/human/typedoc 1
2021-08-11 18:57:35 INFO:  @vladmandic/human version 2.1.2
2021-08-11 18:57:35 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.5.0
2021-08-11 18:57:35 INFO:  Toolchain: {"tfjs":"3.8.0","esbuild":"0.12.19","typescript":"4.3.5","typedoc":"0.21.5","eslint":"7.32.0"}
2021-08-11 18:57:35 INFO:  Clean: ["dist/*","types/*","typedoc/*"]
2021-08-11 18:57:35 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-08-11 18:57:35 STATE: target: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1303,"outputFiles":"dist/tfjs.esm.js"}
2021-08-11 18:57:35 STATE: target: node type: node: {"imports":42,"importBytes":436067,"outputBytes":377952,"outputFiles":"dist/human.node.js"}
2021-08-11 18:57:35 STATE: target: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1311,"outputFiles":"dist/tfjs.esm.js"}
2021-08-11 18:57:35 STATE: target: nodeGPU type: node: {"imports":42,"importBytes":436075,"outputBytes":377956,"outputFiles":"dist/human.node-gpu.js"}
2021-08-11 18:57:35 STATE: target: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1378,"outputFiles":"dist/tfjs.esm.js"}
2021-08-11 18:57:35 STATE: target: nodeWASM type: node: {"imports":42,"importBytes":436142,"outputBytes":378028,"outputFiles":"dist/human.node-wasm.js"}
2021-08-11 18:57:35 STATE: target: browserNoBundle type: tfjs: {"imports":1,"importBytes":2168,"outputBytes":1242,"outputFiles":"dist/tfjs.esm.js"}
2021-08-11 18:57:35 STATE: target: browserNoBundle type: esm: {"imports":42,"importBytes":436006,"outputBytes":248005,"outputFiles":"dist/human.esm-nobundle.js"}
2021-08-11 18:57:36 STATE: target: browserBundle type: tfjs: {"modules":1170,"moduleBytes":4145868,"imports":7,"importBytes":2168,"outputBytes":2334701,"outputFiles":"dist/tfjs.esm.js"}
2021-08-11 18:57:36 STATE: target: browserBundle type: iife: {"imports":42,"importBytes":2769465,"outputBytes":1378447,"outputFiles":"dist/human.js"}
2021-08-11 18:57:36 STATE: target: browserBundle type: esm: {"imports":42,"importBytes":2769465,"outputBytes":1378439,"outputFiles":"dist/human.esm.js"}
2021-08-11 18:57:36 INFO:  Running Linter: ["server/","src/","tfjs/","test/","demo/"]
2021-08-11 18:57:59 INFO:  Linter complete: files: 75 errors: 0 warnings: 0
2021-08-11 18:57:59 INFO:  Generate ChangeLog: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-08-11 18:57:59 INFO:  Generate Typings: ["src/human.ts"] outDir: ["types"]
2021-08-11 18:58:13 INFO:  Generate TypeDocs: ["src/human.ts"] outDir: ["typedoc"]
2021-08-11 18:58:27 INFO:  Documentation generated at /home/vlado/dev/human/typedoc 1

View File

@@ -64,7 +64,7 @@ export class BlazeFaceModel {
this.config = mergeDeep(this.config, userConfig) as Config;
const nmsTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.face.detector.maxDetected, this.config.face.detector.iouThreshold, this.config.face.detector.minConfidence);
const nms = nmsTensor.arraySync();
const nms = await nmsTensor.array();
tf.dispose(nmsTensor);
const annotatedBoxes: Array<{ box: { startPoint: Tensor, endPoint: Tensor }, landmarks: Tensor, anchor: number[], confidence: number }> = [];
for (let i = 0; i < nms.length; i++) {

View File

@@ -51,7 +51,6 @@ export async function predict(input: Tensor, config: Config): Promise<Face[]> {
mesh: prediction.mesh,
meshRaw,
annotations,
image: prediction.image,
tensor: prediction.image,
});
if (prediction.coords) tf.dispose(prediction.coords);

View File

@@ -168,7 +168,10 @@ export class Pipeline {
this.storedBoxes = [];
this.detectedFaces = 0;
for (const possible of detector.boxes) {
this.storedBoxes.push({ startPoint: possible.box.startPoint.dataSync(), endPoint: possible.box.endPoint.dataSync(), landmarks: possible.landmarks.arraySync(), confidence: possible.confidence });
const startPoint = await possible.box.startPoint.data();
const endPoint = await possible.box.endPoint.data();
const landmarks = await possible.landmarks.array();
this.storedBoxes.push({ startPoint, endPoint, landmarks, confidence: possible.confidence });
}
if (this.storedBoxes.length > 0) useFreshBox = true;
}

View File

@@ -163,8 +163,8 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
// is something went wrong, skip the face
// @ts-ignore possibly undefined
if (!faces[i].image || faces[i].image['isDisposedInternal']) {
log('Face object is disposed:', faces[i].image);
if (!faces[i].tensor || faces[i].tensor['isDisposedInternal']) {
log('Face object is disposed:', faces[i].tensor);
continue;
}
@@ -173,11 +173,11 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
// run emotion, inherits face from blazeface
parent.analyze('Start Emotion:');
if (parent.config.async) {
emotionRes = parent.config.face.emotion.enabled ? emotion.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : {};
emotionRes = parent.config.face.emotion.enabled ? emotion.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
} else {
parent.state = 'run:emotion';
timeStamp = now();
emotionRes = parent.config.face.emotion.enabled ? await emotion.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : {};
emotionRes = parent.config.face.emotion.enabled ? await emotion.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
parent.performance.emotion = Math.trunc(now() - timeStamp);
}
parent.analyze('End Emotion:');
@@ -186,11 +186,11 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
/*
parent.analyze('Start GEAR:');
if (parent.config.async) {
gearRes = parent.config.face.agegenderrace.enabled ? agegenderrace.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : {};
gearRes = parent.config.face.agegenderrace.enabled ? agegenderrace.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
} else {
parent.state = 'run:gear';
timeStamp = now();
gearRes = parent.config.face.agegenderrace.enabled ? await agegenderrace.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : {};
gearRes = parent.config.face.agegenderrace.enabled ? await agegenderrace.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
parent.performance.emotion = Math.trunc(now() - timeStamp);
}
parent.analyze('End GEAR:');
@@ -199,11 +199,11 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
// run emotion, inherits face from blazeface
parent.analyze('Start Description:');
if (parent.config.async) {
descRes = parent.config.face.description.enabled ? faceres.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : [];
descRes = parent.config.face.description.enabled ? faceres.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : [];
} else {
parent.state = 'run:description';
timeStamp = now();
descRes = parent.config.face.description.enabled ? await faceres.predict(faces[i].image || tf.tensor([]), parent.config, i, faces.length) : [];
descRes = parent.config.face.description.enabled ? await faceres.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : [];
parent.performance.embedding = Math.trunc(now() - timeStamp);
}
parent.analyze('End Description:');
@@ -226,6 +226,12 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2]
: 0;
// optionally return tensor
const tensor = parent.config.face.detector.return ? tf.squeeze(faces[i].tensor) : null;
// dispose original face tensor
tf.dispose(faces[i].tensor);
// delete temp face image
if (faces[i].tensor) delete faces[i].tensor;
// combine results
faceRes.push({
...faces[i],
@@ -237,12 +243,8 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
emotion: emotionRes,
iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,
rotation,
tensor: parent.config.face.detector.return ? tf.squeeze(faces[i].image) : null,
tensor,
});
// dispose original face tensor
tf.dispose(faces[i].image);
// delete temp face image
if (faces[i].image) delete faces[i].image;
parent.analyze('End Face');
}

View File

@@ -50,7 +50,7 @@ export class HandDetector {
const boxes = this.normalizeBoxes(rawBoxes);
tf.dispose(rawBoxes);
const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.hand.maxDetected, config.hand.iouThreshold, config.hand.minConfidence);
const filtered = filteredT.arraySync();
const filtered = await filteredT.array();
tf.dispose(scoresT);
tf.dispose(filteredT);
@@ -81,7 +81,7 @@ export class HandDetector {
const boxes = prediction.box.dataSync();
const startPoint = boxes.slice(0, 2);
const endPoint = boxes.slice(2, 4);
const palmLandmarks = prediction.palmLandmarks.arraySync();
const palmLandmarks = await prediction.palmLandmarks.array();
tf.dispose(prediction.box);
tf.dispose(prediction.palmLandmarks);
hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks, confidence: prediction.confidence }, [inputWidth / this.inputSize, inputHeight / this.inputSize]));

View File

@@ -122,7 +122,7 @@ export class HandPipeline {
tf.dispose(confidenceT);
if (confidence >= config.hand.minConfidence) {
const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
const rawCoords = keypointsReshaped.arraySync();
const rawCoords = await keypointsReshaped.array();
tf.dispose(keypoints);
tf.dispose(keypointsReshaped);
const coords = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);

View File

@@ -50,7 +50,7 @@ export async function predict(image: Tensor, config: Config): Promise<Body[]> {
if (resT) {
keypoints.length = 0;
const res = resT.arraySync();
const res = await resT.array();
tf.dispose(resT);
const kpt = res[0][0];
for (let id = 0; id < kpt.length; id++) {

View File

@@ -28,15 +28,15 @@ export async function load(config: Config): Promise<GraphModel> {
async function process(res: Tensor, inputSize, outputShape, config: Config) {
if (!res) return [];
const results: Array<Item> = [];
const detections = res.arraySync();
const detections = await res.array();
const squeezeT = tf.squeeze(res);
tf.dispose(res);
const arr = tf.split(squeezeT, 6, 1); // x1, y1, x2, y2, score, class
tf.dispose(squeezeT);
const stackT = tf.stack([arr[1], arr[0], arr[3], arr[2]], 1); // reorder dims as tf.nms expects y, x
const boxesT = stackT.squeeze();
const scoresT = arr[4].squeeze();
const classesT = arr[5].squeeze();
const boxesT = tf.squeeze(stackT);
const scoresT = tf.squeeze(arr[4]);
const classesT = tf.squeeze(arr[5]);
arr.forEach((t) => tf.dispose(t));
const nmsT = await tf.image.nonMaxSuppressionAsync(boxesT, scoresT, config.object.maxDetected, config.object.iouThreshold, config.object.minConfidence);
tf.dispose(boxesT);

View File

@@ -32,14 +32,14 @@ async function process(res, inputSize, outputShape, config) {
let results: Array<Item> = [];
for (const strideSize of [1, 2, 4]) { // try each stride size as it detects large/medium/small objects
// find scores, boxes, classes
tf.tidy(() => { // wrap in tidy to automatically deallocate temp tensors
tf.tidy(async () => { // wrap in tidy to automatically deallocate temp tensors
const baseSize = strideSize * 13; // 13x13=169, 26x26=676, 52x52=2704
// find boxes and scores output depending on stride
const scoresT = res.find((a) => (a.shape[1] === (baseSize ** 2) && a.shape[2] === labels.length))?.squeeze();
const featuresT = res.find((a) => (a.shape[1] === (baseSize ** 2) && a.shape[2] < labels.length))?.squeeze();
const boxesMax = featuresT.reshape([-1, 4, featuresT.shape[1] / 4]); // reshape [output] to [4, output / 4] where number is number of different features inside each stride
const boxIdx = boxesMax.argMax(2).arraySync(); // what we need is indexes of features with highest scores, not values itself
const scores = scoresT.arraySync(); // optionally use exponential scores or just as-is
const boxIdx = await boxesMax.argMax(2).array(); // what we need is indexes of features with highest scores, not values itself
const scores = await scoresT.array(); // optionally use exponential scores or just as-is
for (let i = 0; i < scoresT.shape[0]; i++) { // total strides (x * y matrix)
for (let j = 0; j < scoresT.shape[1]; j++) { // one score for each class
const score = scores[i][j]; // get score for current position

View File

@@ -53,8 +53,7 @@ export interface Face {
matrix: [number, number, number, number, number, number, number, number, number],
gaze: { bearing: number, strength: number },
}
image?: Tensor;
tensor: Tensor,
tensor?: Tensor,
}
/** Body results

View File

@@ -3,7 +3,6 @@
* @external
*/
// import from src
// get versions of all packages
import { version as tfjsVersion } from '@tensorflow/tfjs/package.json';
import { version as tfjsCoreVersion } from '@tensorflow/tfjs-core/package.json';
@@ -14,7 +13,7 @@ import { version as tfjsBackendCPUVersion } from '@tensorflow/tfjs-backend-cpu/p
import { version as tfjsBackendWebGLVersion } from '@tensorflow/tfjs-backend-webgl/package.json';
import { version as tfjsBackendWASMVersion } from '@tensorflow/tfjs-backend-wasm/package.json';
// export all
// export all from sources
// requires treeShaking:ignore-annotations due to tfjs misconfiguration
/*
export * from '@tensorflow/tfjs-core/src/index';
@@ -26,7 +25,7 @@ export * from '@tensorflow/tfjs-backend-webgl/src/index';
export * from '@tensorflow/tfjs-backend-wasm/src/index';
*/
// export all
// export all from build
export * from '@tensorflow/tfjs-core/dist/index.js';
export * from '@tensorflow/tfjs-layers/dist/index.js';
export * from '@tensorflow/tfjs-converter/dist/index.js';
@@ -34,6 +33,7 @@ export * as data from '@tensorflow/tfjs-data/dist/index.js';
export * from '@tensorflow/tfjs-backend-cpu/dist/index.js';
export * from '@tensorflow/tfjs-backend-webgl/dist/index.js';
export * from '@tensorflow/tfjs-backend-wasm/dist/index.js';
// export * from '@tensorflow/tfjs-backend-webgpu/dist/index.js'; // experimental
// export versions
export const version = {
@@ -46,4 +46,3 @@ export const version = {
'tfjs-backend-webgl': tfjsBackendWebGLVersion,
'tfjs-backend-wasm': tfjsBackendWASMVersion,
};
// export const version = {};

File diff suppressed because one or more lines are too long

View File

@@ -122,7 +122,6 @@
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="Face.html#gender" class="tsd-kind-icon">gender</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="Face.html#genderScore" class="tsd-kind-icon">gender<wbr>Score</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="Face.html#id" class="tsd-kind-icon">id</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="Face.html#image" class="tsd-kind-icon">image</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="Face.html#iris" class="tsd-kind-icon">iris</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="Face.html#mesh" class="tsd-kind-icon">mesh</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="Face.html#meshRaw" class="tsd-kind-icon">mesh<wbr>Raw</a></li>
@@ -213,13 +212,6 @@
<aside class="tsd-sources">
</aside>
</section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="image" class="tsd-anchor"></a>
<h3><span class="tsd-flag ts-flagOptional">Optional</span> image</h3>
<div class="tsd-signature tsd-kind-icon">image<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">&gt;</span></div>
<aside class="tsd-sources">
</aside>
</section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="iris" class="tsd-anchor"></a>
<h3><span class="tsd-flag ts-flagOptional">Optional</span> iris</h3>
@@ -290,7 +282,7 @@
</section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="tensor" class="tsd-anchor"></a>
<h3>tensor</h3>
<h3><span class="tsd-flag ts-flagOptional">Optional</span> tensor</h3>
<div class="tsd-signature tsd-kind-icon">tensor<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">&gt;</span></div>
<aside class="tsd-sources">
</aside>
@@ -360,9 +352,6 @@
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="Face.html#id" class="tsd-kind-icon">id</a>
</li>
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="Face.html#image" class="tsd-kind-icon">image</a>
</li>
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="Face.html#iris" class="tsd-kind-icon">iris</a>
</li>

View File

@@ -61,8 +61,7 @@ export interface Face {
strength: number;
};
};
image?: Tensor;
tensor: Tensor;
tensor?: Tensor;
}
/** Body results
*

2
wiki

@@ -1 +1 @@
Subproject commit 2135debf198b5b0ecb670896bef837cbb45fe32e
Subproject commit bdc4077a3df07abdf4a2d5b2d2beadf2e573e8d8