mirror of https://github.com/vladmandic/human
implemented movenet-multipose model
parent ed4c2c6efa
commit 348176b180
@@ -9,12 +9,13 @@ Repository: **<git+https://github.com/vladmandic/human.git>**

## Changelog

### **HEAD -> main** 2021/08/19 mandic00@live.com

### **2.1.4** 2021/08/19 mandic00@live.com

- add static type definitions to main class

### **origin/main** 2021/08/18 mandic00@live.com

- fix interpolation overflow
- rebuild full
- improve face box caching
- strict type checks
TODO.md | 16
@@ -7,10 +7,6 @@ WebGL shader optimizations for faster load and initial detection

- Implement WebGL uniforms for shaders: <https://github.com/tensorflow/tfjs/issues/5205>
- Fix shader packing: <https://github.com/tensorflow/tfjs/issues/5343>

-MoveNet MultiPose Model: <https://github.com/vladmandic/movenet>
-
-- Implementation is ready, but model is 2x size and 0.5x performance
-
<br>

## Exploring

@@ -45,16 +41,24 @@ Feature is automatically disabled in NodeJS without user impact

- Backend NodeJS missing kernel op `FlipLeftRight`
  <https://github.com/tensorflow/tfjs/issues/4066>
  *Target: `Human` v2.2 with `TFJS` v3.9*
- Backend NodeJS missing kernel op `RotateWithOffset`
  <https://github.com/tensorflow/tfjs/issues/5473>
  *Target: N/A*

Hand detection using WASM backend has reduced precision due to math rounding errors in backend
*Target: N/A*

<br>

+### Body Detection
+
+MoveNet MultiPose model does not work with WASM backend due to missing F32 implementation
+
+- Backend WASM missing F32 implementation
+  <https://github.com/tensorflow/tfjs/issues/5516>
+  *Target: N/A*
+
### Object Detection

Object detection using CenterNet or NanoDet models is not working when using WASM backend due to missing kernel ops in TFJS
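Since the new model is unusable on the WASM backend until tfjs issue 5516 is resolved, a caller that wants MultiPose has to pin a different backend explicitly. A minimal sketch of such a user config, assuming the standard `Human` configuration keys `backend` and `body.modelPath`; the choice of `webgl` here is illustrative, not project guidance:

```ts
// Hypothetical user config: pick a backend with full F32 kernel support so
// movenet-multipose can run; 'wasm' would fail per tfjs issue #5516.
const userConfig = {
  backend: 'webgl', // or 'tensorflow' when running under NodeJS
  body: { enabled: true, modelPath: 'movenet-multipose.json' },
};
```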
@@ -51,6 +51,7 @@ let userConfig = {
   gesture: { enabled: false },
   hand: { enabled: false },
   body: { enabled: false },
+  // body: { enabled: true, modelPath: 'movenet-multipose.json' },
   // body: { enabled: true, modelPath: 'posenet.json' },
   segmentation: { enabled: false },
 */
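For reference, a minimal sketch of how the commented-out demo option above would be used end to end, assuming the published `Human` API (`new Human(config)`, `human.detect(input)`) and default model hosting; the `video` element is illustrative:

```ts
import Human from '@vladmandic/human';

// Enable the new MoveNet MultiPose body model; all other modules stay at defaults.
const human = new Human({ body: { enabled: true, modelPath: 'movenet-multipose.json' } });

async function detectBodies(video: HTMLVideoElement) {
  const result = await human.detect(video);
  // With the multipose model, result.body can now hold several persons per frame.
  for (const person of result.body) console.log(person.id, person.score, person.box);
}
```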
File diff suppressed because one or more lines are too long (5 files)
@@ -8307,6 +8307,88 @@ async function load9(config3) {
     log("cached model:", model6["modelUrl"]);
   return model6;
 }
+async function parseSinglePose(res, config3, image18) {
+  keypoints2.length = 0;
+  const kpt3 = res[0][0];
+  for (let id = 0; id < kpt3.length; id++) {
+    score2 = kpt3[id][2];
+    if (score2 > config3.body.minConfidence) {
+      keypoints2.push({
+        score: Math.round(100 * score2) / 100,
+        part: bodyParts2[id],
+        positionRaw: [
+          kpt3[id][1],
+          kpt3[id][0]
+        ],
+        position: [
+          Math.round((image18.shape[2] || 0) * kpt3[id][1]),
+          Math.round((image18.shape[1] || 0) * kpt3[id][0])
+        ]
+      });
+    }
+  }
+  score2 = keypoints2.reduce((prev, curr) => curr.score > prev ? curr.score : prev, 0);
+  const x = keypoints2.map((a) => a.position[0]);
+  const y = keypoints2.map((a) => a.position[1]);
+  box5 = [
+    Math.min(...x),
+    Math.min(...y),
+    Math.max(...x) - Math.min(...x),
+    Math.max(...y) - Math.min(...y)
+  ];
+  const xRaw = keypoints2.map((a) => a.positionRaw[0]);
+  const yRaw = keypoints2.map((a) => a.positionRaw[1]);
+  boxRaw2 = [
+    Math.min(...xRaw),
+    Math.min(...yRaw),
+    Math.max(...xRaw) - Math.min(...xRaw),
+    Math.max(...yRaw) - Math.min(...yRaw)
+  ];
+  const persons2 = [];
+  persons2.push({ id: 0, score: score2, box: box5, boxRaw: boxRaw2, keypoints: keypoints2 });
+  return persons2;
+}
+async function parseMultiPose(res, config3, image18) {
+  const persons2 = [];
+  for (let p = 0; p < res[0].length; p++) {
+    const kpt3 = res[0][p];
+    score2 = Math.round(100 * kpt3[51 + 4]) / 100;
+    if (score2 < config3.body.minConfidence)
+      continue;
+    keypoints2.length = 0;
+    for (let i = 0; i < 17; i++) {
+      const partScore = Math.round(100 * kpt3[3 * i + 2]) / 100;
+      if (partScore > config3.body.minConfidence) {
+        keypoints2.push({
+          part: bodyParts2[i],
+          score: partScore,
+          positionRaw: [
+            kpt3[3 * i + 1],
+            kpt3[3 * i + 0]
+          ],
+          position: [
+            Math.trunc(kpt3[3 * i + 1] * (image18.shape[2] || 0)),
+            Math.trunc(kpt3[3 * i + 0] * (image18.shape[1] || 0))
+          ]
+        });
+      }
+    }
+    boxRaw2 = [kpt3[51 + 1], kpt3[51 + 0], kpt3[51 + 3] - kpt3[51 + 1], kpt3[51 + 2] - kpt3[51 + 0]];
+    persons2.push({
+      id: p,
+      score: score2,
+      boxRaw: boxRaw2,
+      box: [
+        Math.trunc(boxRaw2[0] * (image18.shape[2] || 0)),
+        Math.trunc(boxRaw2[1] * (image18.shape[1] || 0)),
+        Math.trunc(boxRaw2[2] * (image18.shape[2] || 0)),
+        Math.trunc(boxRaw2[3] * (image18.shape[1] || 0))
+      ],
+      keypoints: keypoints2
+    });
+  }
+  return persons2;
+}
 async function predict8(image18, config3) {
   if (skipped4 < config3.body.skipFrames && config3.skipFrame && Object.keys(keypoints2).length > 0) {
     skipped4++;
@@ -8317,7 +8399,10 @@ async function predict8(image18, config3) {
   const tensor2 = tf15.tidy(() => {
     if (!model6.inputs[0].shape)
       return null;
-    const resize = tf15.image.resizeBilinear(image18, [model6.inputs[0].shape[2], model6.inputs[0].shape[1]], false);
+    let inputSize = model6.inputs[0].shape[2];
+    if (inputSize === -1)
+      inputSize = 256;
+    const resize = tf15.image.resizeBilinear(image18, [inputSize, inputSize], false);
     const cast4 = tf15.cast(resize, "int32");
     return cast4;
   });
@@ -8325,47 +8410,16 @@ async function predict8(image18, config3) {
   if (config3.body.enabled)
     resT = await model6.predict(tensor2);
   tf15.dispose(tensor2);
-  if (resT) {
-    keypoints2.length = 0;
-    const res = await resT.array();
-    tf15.dispose(resT);
-    const kpt3 = res[0][0];
-    for (let id = 0; id < kpt3.length; id++) {
-      score2 = kpt3[id][2];
-      if (score2 > config3.body.minConfidence) {
-        keypoints2.push({
-          score: Math.round(100 * score2) / 100,
-          part: bodyParts2[id],
-          positionRaw: [
-            kpt3[id][1],
-            kpt3[id][0]
-          ],
-          position: [
-            Math.round((image18.shape[2] || 0) * kpt3[id][1]),
-            Math.round((image18.shape[1] || 0) * kpt3[id][0])
-          ]
-        });
-      }
-    }
-  }
-  score2 = keypoints2.reduce((prev, curr) => curr.score > prev ? curr.score : prev, 0);
-  const x = keypoints2.map((a) => a.position[0]);
-  const y = keypoints2.map((a) => a.position[1]);
-  box5 = [
-    Math.min(...x),
-    Math.min(...y),
-    Math.max(...x) - Math.min(...x),
-    Math.max(...y) - Math.min(...y)
-  ];
-  const xRaw = keypoints2.map((a) => a.positionRaw[0]);
-  const yRaw = keypoints2.map((a) => a.positionRaw[1]);
-  boxRaw2 = [
-    Math.min(...xRaw),
-    Math.min(...yRaw),
-    Math.max(...xRaw) - Math.min(...xRaw),
-    Math.max(...yRaw) - Math.min(...yRaw)
-  ];
-  resolve([{ id: 0, score: score2, box: box5, boxRaw: boxRaw2, keypoints: keypoints2 }]);
+  if (!resT)
+    resolve([]);
+  const res = await resT.array();
+  let persons2;
+  if (resT.shape[2] === 17)
+    persons2 = await parseSinglePose(res, config3, image18);
+  else if (resT.shape[2] === 56)
+    persons2 = await parseMultiPose(res, config3, image18);
+  tf15.dispose(resT);
+  resolve(persons2);
 });
 }
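The tensor-preparation change in the hunk above is needed because the multipose graph is exported with a dynamic spatial dimension: `model.inputs[0].shape[2]` reports `-1`, so it can no longer be passed to `resizeBilinear` directly. A standalone sketch of that resolution logic, with an illustrative function name and the 256-pixel fallback this commit hard-codes:

```ts
// Resolve the resize target from a model input signature; a dynamic dimension
// shows up as -1 and is replaced by the fixed fallback used in this commit.
function resolveInputSize(shape: number[] | undefined, fallback = 256): number {
  if (!shape || shape.length < 3) return fallback;
  return shape[2] === -1 ? fallback : shape[2];
}
```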
@@ -76,7 +76,7 @@
     "esbuild": "^0.12.21",
     "eslint": "^7.32.0",
     "eslint-config-airbnb-base": "^14.2.1",
-    "eslint-plugin-import": "^2.24.0",
+    "eslint-plugin-import": "^2.24.1",
     "eslint-plugin-json": "^3.1.0",
     "eslint-plugin-node": "^11.1.0",
     "eslint-plugin-promise": "^5.1.0",
@@ -1,22 +1,22 @@
-2021-08-19 16:17:06 INFO: @vladmandic/human version 2.1.4
-2021-08-19 16:17:06 INFO: User: vlado Platform: linux Arch: x64 Node: v16.5.0
-2021-08-19 16:17:06 INFO: Toolchain: {"tfjs":"3.8.0","esbuild":"0.12.21","typescript":"4.3.5","typedoc":"0.21.6","eslint":"7.32.0"}
-2021-08-19 16:17:06 INFO: Clean: ["dist/*","types/*","typedoc/*"]
-2021-08-19 16:17:06 INFO: Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
-2021-08-19 16:17:06 STATE: target: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1303,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-19 16:17:07 STATE: target: node type: node: {"imports":42,"importBytes":437161,"outputBytes":379821,"outputFiles":"dist/human.node.js"}
-2021-08-19 16:17:07 STATE: target: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1311,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-19 16:17:07 STATE: target: nodeGPU type: node: {"imports":42,"importBytes":437169,"outputBytes":379825,"outputFiles":"dist/human.node-gpu.js"}
-2021-08-19 16:17:07 STATE: target: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1378,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-19 16:17:07 STATE: target: nodeWASM type: node: {"imports":42,"importBytes":437236,"outputBytes":379897,"outputFiles":"dist/human.node-wasm.js"}
-2021-08-19 16:17:07 STATE: target: browserNoBundle type: tfjs: {"imports":1,"importBytes":2168,"outputBytes":1242,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-19 16:17:07 STATE: target: browserNoBundle type: esm: {"imports":42,"importBytes":437100,"outputBytes":249180,"outputFiles":"dist/human.esm-nobundle.js"}
-2021-08-19 16:17:07 STATE: target: browserBundle type: tfjs: {"modules":1170,"moduleBytes":4145868,"imports":7,"importBytes":2168,"outputBytes":2334701,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-19 16:17:08 STATE: target: browserBundle type: iife: {"imports":42,"importBytes":2770559,"outputBytes":1379548,"outputFiles":"dist/human.js"}
-2021-08-19 16:17:08 STATE: target: browserBundle type: esm: {"imports":42,"importBytes":2770559,"outputBytes":1379540,"outputFiles":"dist/human.esm.js"}
-2021-08-19 16:17:08 INFO: Running Linter: ["server/","src/","tfjs/","test/","demo/"]
-2021-08-19 16:17:30 INFO: Linter complete: files: 77 errors: 0 warnings: 0
-2021-08-19 16:17:31 INFO: Generate ChangeLog: ["/home/vlado/dev/human/CHANGELOG.md"]
-2021-08-19 16:17:31 INFO: Generate Typings: ["src/human.ts"] outDir: ["types"]
-2021-08-19 16:17:45 INFO: Generate TypeDocs: ["src/human.ts"] outDir: ["typedoc"]
-2021-08-19 16:17:59 INFO: Documentation generated at /home/vlado/dev/human/typedoc 1
+2021-08-20 09:03:11 INFO: @vladmandic/human version 2.1.4
+2021-08-20 09:03:11 INFO: User: vlado Platform: linux Arch: x64 Node: v16.5.0
+2021-08-20 09:03:11 INFO: Toolchain: {"tfjs":"3.8.0","esbuild":"0.12.21","typescript":"4.3.5","typedoc":"0.21.6","eslint":"7.32.0"}
+2021-08-20 09:03:11 INFO: Clean: ["dist/*","types/*","typedoc/*"]
+2021-08-20 09:03:11 INFO: Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
+2021-08-20 09:03:12 STATE: target: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1303,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-20 09:03:12 STATE: target: node type: node: {"imports":42,"importBytes":438886,"outputBytes":381429,"outputFiles":"dist/human.node.js"}
+2021-08-20 09:03:12 STATE: target: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1311,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-20 09:03:12 STATE: target: nodeGPU type: node: {"imports":42,"importBytes":438894,"outputBytes":381433,"outputFiles":"dist/human.node-gpu.js"}
+2021-08-20 09:03:12 STATE: target: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1378,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-20 09:03:12 STATE: target: nodeWASM type: node: {"imports":42,"importBytes":438961,"outputBytes":381505,"outputFiles":"dist/human.node-wasm.js"}
+2021-08-20 09:03:12 STATE: target: browserNoBundle type: tfjs: {"imports":1,"importBytes":2168,"outputBytes":1242,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-20 09:03:12 STATE: target: browserNoBundle type: esm: {"imports":42,"importBytes":438825,"outputBytes":249942,"outputFiles":"dist/human.esm-nobundle.js"}
+2021-08-20 09:03:12 STATE: target: browserBundle type: tfjs: {"modules":1170,"moduleBytes":4145868,"imports":7,"importBytes":2168,"outputBytes":2334701,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-20 09:03:13 STATE: target: browserBundle type: iife: {"imports":42,"importBytes":2772284,"outputBytes":1380316,"outputFiles":"dist/human.js"}
+2021-08-20 09:03:13 STATE: target: browserBundle type: esm: {"imports":42,"importBytes":2772284,"outputBytes":1380308,"outputFiles":"dist/human.esm.js"}
+2021-08-20 09:03:13 INFO: Running Linter: ["server/","src/","tfjs/","test/","demo/"]
+2021-08-20 09:03:37 INFO: Linter complete: files: 77 errors: 0 warnings: 0
+2021-08-20 09:03:37 INFO: Generate ChangeLog: ["/home/vlado/dev/human/CHANGELOG.md"]
+2021-08-20 09:03:37 INFO: Generate Typings: ["src/human.ts"] outDir: ["types"]
+2021-08-20 09:03:51 INFO: Generate TypeDocs: ["src/human.ts"] outDir: ["typedoc"]
+2021-08-20 09:04:05 INFO: Documentation generated at /home/vlado/dev/human/typedoc 1
@@ -11,8 +11,9 @@ import { Config } from '../config';
 let model: GraphModel;

 type Keypoints = { score: number, part: string, position: [number, number], positionRaw: [number, number] };
+type Person = { id: number, score: number, box: [number, number, number, number], boxRaw: [number, number, number, number], keypoints: Array<Keypoints> }

 const keypoints: Array<Keypoints> = [];
 let box: [number, number, number, number] = [0, 0, 0, 0];
 let boxRaw: [number, number, number, number] = [0, 0, 0, 0];
 let score = 0;
@@ -29,6 +30,90 @@ export async function load(config: Config): Promise<GraphModel> {
   return model;
 }

+async function parseSinglePose(res, config, image) {
+  keypoints.length = 0;
+  const kpt = res[0][0];
+  for (let id = 0; id < kpt.length; id++) {
+    score = kpt[id][2];
+    if (score > config.body.minConfidence) {
+      keypoints.push({
+        score: Math.round(100 * score) / 100,
+        part: bodyParts[id],
+        positionRaw: [ // normalized to 0..1
+          kpt[id][1],
+          kpt[id][0],
+        ],
+        position: [ // normalized to input image size
+          Math.round((image.shape[2] || 0) * kpt[id][1]),
+          Math.round((image.shape[1] || 0) * kpt[id][0]),
+        ],
+      });
+    }
+  }
+  score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
+  const x = keypoints.map((a) => a.position[0]);
+  const y = keypoints.map((a) => a.position[1]);
+  box = [
+    Math.min(...x),
+    Math.min(...y),
+    Math.max(...x) - Math.min(...x),
+    Math.max(...y) - Math.min(...y),
+  ];
+  const xRaw = keypoints.map((a) => a.positionRaw[0]);
+  const yRaw = keypoints.map((a) => a.positionRaw[1]);
+  boxRaw = [
+    Math.min(...xRaw),
+    Math.min(...yRaw),
+    Math.max(...xRaw) - Math.min(...xRaw),
+    Math.max(...yRaw) - Math.min(...yRaw),
+  ];
+  const persons: Array<Person> = [];
+  persons.push({ id: 0, score, box, boxRaw, keypoints });
+  return persons;
+}
+
+async function parseMultiPose(res, config, image) {
+  const persons: Array<Person> = [];
+  for (let p = 0; p < res[0].length; p++) {
+    const kpt = res[0][p];
+    score = Math.round(100 * kpt[51 + 4]) / 100;
+    // eslint-disable-next-line no-continue
+    if (score < config.body.minConfidence) continue;
+    keypoints.length = 0;
+    for (let i = 0; i < 17; i++) {
+      const partScore = Math.round(100 * kpt[3 * i + 2]) / 100;
+      if (partScore > config.body.minConfidence) {
+        keypoints.push({
+          part: bodyParts[i],
+          score: partScore,
+          positionRaw: [
+            kpt[3 * i + 1],
+            kpt[3 * i + 0],
+          ],
+          position: [
+            Math.trunc(kpt[3 * i + 1] * (image.shape[2] || 0)),
+            Math.trunc(kpt[3 * i + 0] * (image.shape[1] || 0)),
+          ],
+        });
+      }
+    }
+    boxRaw = [kpt[51 + 1], kpt[51 + 0], kpt[51 + 3] - kpt[51 + 1], kpt[51 + 2] - kpt[51 + 0]];
+    persons.push({
+      id: p,
+      score,
+      boxRaw,
+      box: [
+        Math.trunc(boxRaw[0] * (image.shape[2] || 0)),
+        Math.trunc(boxRaw[1] * (image.shape[1] || 0)),
+        Math.trunc(boxRaw[2] * (image.shape[2] || 0)),
+        Math.trunc(boxRaw[3] * (image.shape[1] || 0)),
+      ],
+      keypoints,
+    });
+  }
+  return persons;
+}
+
 export async function predict(image: Tensor, config: Config): Promise<Body[]> {
   if ((skipped < config.body.skipFrames) && config.skipFrame && Object.keys(keypoints).length > 0) {
     skipped++;
@@ -38,7 +123,9 @@ export async function predict(image: Tensor, config: Config): Promise<Body[]> {
   return new Promise(async (resolve) => {
     const tensor = tf.tidy(() => {
       if (!model.inputs[0].shape) return null;
-      const resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
+      let inputSize = model.inputs[0].shape[2];
+      if (inputSize === -1) inputSize = 256;
+      const resize = tf.image.resizeBilinear(image, [inputSize, inputSize], false);
       const cast = tf.cast(resize, 'int32');
       return cast;
     });
@@ -47,46 +134,13 @@ export async function predict(image: Tensor, config: Config): Promise<Body[]> {
     if (config.body.enabled) resT = await model.predict(tensor);
     tf.dispose(tensor);

-    if (resT) {
-      keypoints.length = 0;
-      const res = await resT.array();
-      tf.dispose(resT);
-      const kpt = res[0][0];
-      for (let id = 0; id < kpt.length; id++) {
-        score = kpt[id][2];
-        if (score > config.body.minConfidence) {
-          keypoints.push({
-            score: Math.round(100 * score) / 100,
-            part: bodyParts[id],
-            positionRaw: [ // normalized to 0..1
-              kpt[id][1],
-              kpt[id][0],
-            ],
-            position: [ // normalized to input image size
-              Math.round((image.shape[2] || 0) * kpt[id][1]),
-              Math.round((image.shape[1] || 0) * kpt[id][0]),
-            ],
-          });
-        }
-      }
-    }
-    score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
-    const x = keypoints.map((a) => a.position[0]);
-    const y = keypoints.map((a) => a.position[1]);
-    box = [
-      Math.min(...x),
-      Math.min(...y),
-      Math.max(...x) - Math.min(...x),
-      Math.max(...y) - Math.min(...y),
-    ];
-    const xRaw = keypoints.map((a) => a.positionRaw[0]);
-    const yRaw = keypoints.map((a) => a.positionRaw[1]);
-    boxRaw = [
-      Math.min(...xRaw),
-      Math.min(...yRaw),
-      Math.max(...xRaw) - Math.min(...xRaw),
-      Math.max(...yRaw) - Math.min(...yRaw),
-    ];
-    resolve([{ id: 0, score, box, boxRaw, keypoints }]);
+    if (!resT) resolve([]);
+    const res = await resT.array();
+    let persons;
+    if (resT.shape[2] === 17) persons = await parseSinglePose(res, config, image);
+    else if (resT.shape[2] === 56) persons = await parseMultiPose(res, config, image);
+    tf.dispose(resT);
+
+    resolve(persons);
   });
 }
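The two parsers above correspond to the two MoveNet output layouts: the single-pose head emits a `[1, 1, 17, 3]` tensor of `[y, x, score]` keypoints, while the multipose head emits `[1, N, 56]`, one row per person: 17 keypoint triplets (51 values) followed by `[ymin, xmin, ymax, xmax, score]` for the person box, which is why `parseMultiPose` indexes from `51 + n`. A standalone sketch of that row layout (names are illustrative):

```ts
// One multipose output row (56 floats), as consumed by parseMultiPose:
//   row[0..50]  17 keypoints as [y, x, score] triplets, normalized 0..1
//   row[51..55] person box as [ymin, xmin, ymax, xmax, score]
function decodeRow(row: number[]) {
  const keypoints = Array.from({ length: 17 }, (_, i) => ({
    y: row[3 * i + 0], x: row[3 * i + 1], score: row[3 * i + 2],
  }));
  const [ymin, xmin, ymax, xmax, score] = row.slice(51, 56);
  // Same convention as the source: boxRaw is [x, y, width, height], normalized.
  return { score, boxRaw: [xmin, ymin, xmax - xmin, ymax - ymin], keypoints };
}
```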
test/test.log | 240
@@ -1,120 +1,120 @@
-2021-08-19 16:14:50 INFO: @vladmandic/human version 2.1.3
-2021-08-19 16:14:50 INFO: User: vlado Platform: linux Arch: x64 Node: v16.5.0
-2021-08-19 16:14:50 INFO: tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
-2021-08-19 16:14:50 INFO: test-node.js start
-2021-08-19 16:14:50 STATE: test-node.js passed: create human
-2021-08-19 16:14:50 INFO: test-node.js human version: 2.1.3
-2021-08-19 16:14:50 INFO: test-node.js platform: linux x64 agent: NodeJS v16.5.0
-2021-08-19 16:14:50 INFO: test-node.js tfjs version: 3.8.0
-2021-08-19 16:14:51 STATE: test-node.js passed: set backend: tensorflow
-2021-08-19 16:14:51 STATE: test-node.js passed: load models
-2021-08-19 16:14:51 STATE: test-node.js result: defined models: 14 loaded models: 7
-2021-08-19 16:14:51 STATE: test-node.js passed: warmup: none default
-2021-08-19 16:14:52 STATE: test-node.js passed: warmup: face default
-2021-08-19 16:14:52 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4}
-2021-08-19 16:14:52 DATA: test-node.js result: performance: load: 418 total: 1284
-2021-08-19 16:14:53 STATE: test-node.js passed: warmup: body default
-2021-08-19 16:14:53 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
-2021-08-19 16:14:53 DATA: test-node.js result: performance: load: 418 total: 1203
-2021-08-19 16:14:53 INFO: test-node.js test body variants
-2021-08-19 16:14:54 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
-2021-08-19 16:14:55 STATE: test-node.js passed: detect: samples/ai-body.jpg posenet
-2021-08-19 16:14:55 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
-2021-08-19 16:14:55 DATA: test-node.js result: performance: load: 418 total: 792
-2021-08-19 16:14:56 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
-2021-08-19 16:14:56 STATE: test-node.js passed: detect: samples/ai-body.jpg movenet
-2021-08-19 16:14:56 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
-2021-08-19 16:14:56 DATA: test-node.js result: performance: load: 418 total: 220
-2021-08-19 16:14:57 STATE: test-node.js passed: detect: random default
-2021-08-19 16:14:57 DATA: test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
-2021-08-19 16:14:57 DATA: test-node.js result: performance: load: 418 total: 691
-2021-08-19 16:14:57 INFO: test-node.js test: first instance
-2021-08-19 16:14:57 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
-2021-08-19 16:14:58 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
-2021-08-19 16:14:58 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
-2021-08-19 16:14:58 DATA: test-node.js result: performance: load: 418 total: 1042
-2021-08-19 16:14:58 INFO: test-node.js test: second instance
-2021-08-19 16:14:58 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
-2021-08-19 16:14:59 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
-2021-08-19 16:14:59 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
-2021-08-19 16:14:59 DATA: test-node.js result: performance: load: 3 total: 1006
-2021-08-19 16:14:59 INFO: test-node.js test: concurrent
-2021-08-19 16:15:00 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
-2021-08-19 16:15:00 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
-2021-08-19 16:15:00 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
-2021-08-19 16:15:01 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
-2021-08-19 16:15:07 STATE: test-node.js passed: detect: samples/ai-face.jpg default
-2021-08-19 16:15:07 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":42}
-2021-08-19 16:15:07 DATA: test-node.js result: performance: load: 418 total: 5554
-2021-08-19 16:15:07 STATE: test-node.js passed: detect: samples/ai-face.jpg default
-2021-08-19 16:15:07 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":42}
-2021-08-19 16:15:07 DATA: test-node.js result: performance: load: 3 total: 5554
-2021-08-19 16:15:07 STATE: test-node.js passed: detect: samples/ai-body.jpg default
-2021-08-19 16:15:07 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":42}
-2021-08-19 16:15:07 DATA: test-node.js result: performance: load: 418 total: 5554
-2021-08-19 16:15:07 STATE: test-node.js passed: detect: samples/ai-body.jpg default
-2021-08-19 16:15:07 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":42}
-2021-08-19 16:15:07 DATA: test-node.js result: performance: load: 3 total: 5554
-2021-08-19 16:15:07 INFO: test-node.js test complete: 16737 ms
-2021-08-19 16:15:07 INFO: test-node-gpu.js start
-2021-08-19 16:15:08 WARN: test-node-gpu.js stderr: 2021-08-19 16:15:08.166531: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
-2021-08-19 16:15:08 WARN: test-node-gpu.js stderr: 2021-08-19 16:15:08.343531: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
-2021-08-19 16:15:08 WARN: test-node-gpu.js stderr: 2021-08-19 16:15:08.343658: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
-2021-08-19 16:15:08 STATE: test-node-gpu.js passed: create human
-2021-08-19 16:15:08 INFO: test-node-gpu.js human version: 2.1.3
-2021-08-19 16:15:08 INFO: test-node-gpu.js platform: linux x64 agent: NodeJS v16.5.0
-2021-08-19 16:15:08 INFO: test-node-gpu.js tfjs version: 3.8.0
-2021-08-19 16:15:08 STATE: test-node-gpu.js passed: set backend: tensorflow
-2021-08-19 16:15:08 STATE: test-node-gpu.js passed: load models
-2021-08-19 16:15:08 STATE: test-node-gpu.js result: defined models: 14 loaded models: 7
-2021-08-19 16:15:08 STATE: test-node-gpu.js passed: warmup: none default
-2021-08-19 16:15:10 STATE: test-node-gpu.js passed: warmup: face default
-2021-08-19 16:15:10 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4}
-2021-08-19 16:15:10 DATA: test-node-gpu.js result: performance: load: 285 total: 1378
-2021-08-19 16:15:11 STATE: test-node-gpu.js passed: warmup: body default
-2021-08-19 16:15:11 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
-2021-08-19 16:15:11 DATA: test-node-gpu.js result: performance: load: 285 total: 1206
-2021-08-19 16:15:11 INFO: test-node-gpu.js test body variants
-2021-08-19 16:15:12 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
-2021-08-19 16:15:13 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg posenet
-2021-08-19 16:15:13 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
-2021-08-19 16:15:13 DATA: test-node-gpu.js result: performance: load: 285 total: 788
-2021-08-19 16:15:13 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
-2021-08-19 16:15:14 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg movenet
-2021-08-19 16:15:14 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
-2021-08-19 16:15:14 DATA: test-node-gpu.js result: performance: load: 285 total: 218
-2021-08-19 16:15:14 STATE: test-node-gpu.js passed: detect: random default
-2021-08-19 16:15:14 DATA: test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0,"keypoints":0}
-2021-08-19 16:15:14 DATA: test-node-gpu.js result: performance: load: 285 total: 193
-2021-08-19 16:15:14 INFO: test-node-gpu.js test: first instance
-2021-08-19 16:15:14 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
-2021-08-19 16:15:14 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
-2021-08-19 16:15:14 DATA: test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0.69,"keypoints":10}
-2021-08-19 16:15:14 DATA: test-node-gpu.js result: performance: load: 285 total: 143
-2021-08-19 16:15:14 INFO: test-node-gpu.js test: second instance
-2021-08-19 16:15:15 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
-2021-08-19 16:15:16 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
-2021-08-19 16:15:16 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
-2021-08-19 16:15:16 DATA: test-node-gpu.js result: performance: load: 5 total: 1003
-2021-08-19 16:15:16 INFO: test-node-gpu.js test: concurrent
-2021-08-19 16:15:16 STATE: test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
-2021-08-19 16:15:16 STATE: test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
-2021-08-19 16:15:17 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
-2021-08-19 16:15:18 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
-2021-08-19 16:15:22 STATE: test-node-gpu.js passed: detect: samples/ai-face.jpg default
-2021-08-19 16:15:22 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":42}
-2021-08-19 16:15:22 DATA: test-node-gpu.js result: performance: load: 285 total: 4653
-2021-08-19 16:15:22 STATE: test-node-gpu.js passed: detect: samples/ai-face.jpg default
-2021-08-19 16:15:22 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":42}
-2021-08-19 16:15:22 DATA: test-node-gpu.js result: performance: load: 5 total: 4653
-2021-08-19 16:15:22 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
-2021-08-19 16:15:22 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":42}
-2021-08-19 16:15:22 DATA: test-node-gpu.js result: performance: load: 285 total: 4653
-2021-08-19 16:15:22 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
-2021-08-19 16:15:22 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":42}
-2021-08-19 16:15:22 DATA: test-node-gpu.js result: performance: load: 5 total: 4653
-2021-08-19 16:15:22 INFO: test-node-gpu.js test complete: 14260 ms
-2021-08-19 16:15:23 INFO: test-node-wasm.js start
-2021-08-19 16:15:23 ERROR: test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
-2021-08-19 16:15:23 ERROR: test-node-wasm.js aborting test
-2021-08-19 16:15:23 INFO: status: {"passed":46,"failed":1}
+2021-08-20 09:04:08 INFO: @vladmandic/human version 2.1.4
+2021-08-20 09:04:08 INFO: User: vlado Platform: linux Arch: x64 Node: v16.5.0
+2021-08-20 09:04:08 INFO: tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
+2021-08-20 09:04:08 INFO: test-node.js start
+2021-08-20 09:04:08 STATE: test-node.js passed: create human
+2021-08-20 09:04:08 INFO: test-node.js human version: 2.1.4
+2021-08-20 09:04:08 INFO: test-node.js platform: linux x64 agent: NodeJS v16.5.0
+2021-08-20 09:04:08 INFO: test-node.js tfjs version: 3.8.0
+2021-08-20 09:04:09 STATE: test-node.js passed: set backend: tensorflow
+2021-08-20 09:04:09 STATE: test-node.js passed: load models
+2021-08-20 09:04:09 STATE: test-node.js result: defined models: 14 loaded models: 7
+2021-08-20 09:04:09 STATE: test-node.js passed: warmup: none default
+2021-08-20 09:04:10 STATE: test-node.js passed: warmup: face default
+2021-08-20 09:04:10 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4}
+2021-08-20 09:04:10 DATA: test-node.js result: performance: load: 372 total: 1090
+2021-08-20 09:04:11 STATE: test-node.js passed: warmup: body default
+2021-08-20 09:04:11 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
+2021-08-20 09:04:11 DATA: test-node.js result: performance: load: 372 total: 1054
+2021-08-20 09:04:11 INFO: test-node.js test body variants
+2021-08-20 09:04:12 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
+2021-08-20 09:04:13 STATE: test-node.js passed: detect: samples/ai-body.jpg posenet
+2021-08-20 09:04:13 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
+2021-08-20 09:04:13 DATA: test-node.js result: performance: load: 372 total: 687
+2021-08-20 09:04:13 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
+2021-08-20 09:04:14 STATE: test-node.js passed: detect: samples/ai-body.jpg movenet
+2021-08-20 09:04:14 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
+2021-08-20 09:04:14 DATA: test-node.js result: performance: load: 372 total: 193
+2021-08-20 09:04:14 STATE: test-node.js passed: detect: random default
+2021-08-20 09:04:14 DATA: test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
+2021-08-20 09:04:14 DATA: test-node.js result: performance: load: 372 total: 591
+2021-08-20 09:04:14 INFO: test-node.js test: first instance
+2021-08-20 09:04:15 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
+2021-08-20 09:04:16 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
+2021-08-20 09:04:16 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
+2021-08-20 09:04:16 DATA: test-node.js result: performance: load: 372 total: 975
+2021-08-20 09:04:16 INFO: test-node.js test: second instance
+2021-08-20 09:04:16 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
+2021-08-20 09:04:17 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
+2021-08-20 09:04:17 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
+2021-08-20 09:04:17 DATA: test-node.js result: performance: load: 3 total: 902
+2021-08-20 09:04:17 INFO: test-node.js test: concurrent
+2021-08-20 09:04:17 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
+2021-08-20 09:04:17 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
+2021-08-20 09:04:18 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
+2021-08-20 09:04:19 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
+2021-08-20 09:04:23 STATE: test-node.js passed: detect: samples/ai-face.jpg default
+2021-08-20 09:04:23 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":17}
+2021-08-20 09:04:23 DATA: test-node.js result: performance: load: 372 total: 4140
+2021-08-20 09:04:23 STATE: test-node.js passed: detect: samples/ai-face.jpg default
+2021-08-20 09:04:23 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":17}
+2021-08-20 09:04:23 DATA: test-node.js result: performance: load: 3 total: 4140
+2021-08-20 09:04:23 STATE: test-node.js passed: detect: samples/ai-body.jpg default
+2021-08-20 09:04:23 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
+2021-08-20 09:04:23 DATA: test-node.js result: performance: load: 372 total: 4140
+2021-08-20 09:04:23 STATE: test-node.js passed: detect: samples/ai-body.jpg default
+2021-08-20 09:04:23 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
+2021-08-20 09:04:23 DATA: test-node.js result: performance: load: 3 total: 4140
+2021-08-20 09:04:23 INFO: test-node.js test complete: 14355 ms
+2021-08-20 09:04:23 INFO: test-node-gpu.js start
+2021-08-20 09:04:23 WARN: test-node-gpu.js stderr: 2021-08-20 09:04:23.693064: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
+2021-08-20 09:04:23 WARN: test-node-gpu.js stderr: 2021-08-20 09:04:23.741809: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
+2021-08-20 09:04:23 WARN: test-node-gpu.js stderr: 2021-08-20 09:04:23.741844: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
+2021-08-20 09:04:23 STATE: test-node-gpu.js passed: create human
+2021-08-20 09:04:23 INFO: test-node-gpu.js human version: 2.1.4
+2021-08-20 09:04:23 INFO: test-node-gpu.js platform: linux x64 agent: NodeJS v16.5.0
+2021-08-20 09:04:23 INFO: test-node-gpu.js tfjs version: 3.8.0
+2021-08-20 09:04:24 STATE: test-node-gpu.js passed: set backend: tensorflow
+2021-08-20 09:04:24 STATE: test-node-gpu.js passed: load models
+2021-08-20 09:04:24 STATE: test-node-gpu.js result: defined models: 14 loaded models: 7
+2021-08-20 09:04:24 STATE: test-node-gpu.js passed: warmup: none default
+2021-08-20 09:04:25 STATE: test-node-gpu.js passed: warmup: face default
+2021-08-20 09:04:25 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4}
+2021-08-20 09:04:25 DATA: test-node-gpu.js result: performance: load: 277 total: 1107
+2021-08-20 09:04:26 STATE: test-node-gpu.js passed: warmup: body default
+2021-08-20 09:04:26 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
+2021-08-20 09:04:26 DATA: test-node-gpu.js result: performance: load: 277 total: 1083
+2021-08-20 09:04:26 INFO: test-node-gpu.js test body variants
+2021-08-20 09:04:27 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
+2021-08-20 09:04:27 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg posenet
+2021-08-20 09:04:27 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
+2021-08-20 09:04:27 DATA: test-node-gpu.js result: performance: load: 277 total: 728
+2021-08-20 09:04:28 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
+2021-08-20 09:04:28 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg movenet
+2021-08-20 09:04:28 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
+2021-08-20 09:04:28 DATA: test-node-gpu.js result: performance: load: 277 total: 191
+2021-08-20 09:04:29 STATE: test-node-gpu.js passed: detect: random default
+2021-08-20 09:04:29 DATA: test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
+2021-08-20 09:04:29 DATA: test-node-gpu.js result: performance: load: 277 total: 608
+2021-08-20 09:04:29 INFO: test-node-gpu.js test: first instance
+2021-08-20 09:04:29 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
+2021-08-20 09:04:30 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
+2021-08-20 09:04:30 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
+2021-08-20 09:04:30 DATA: test-node-gpu.js result: performance: load: 277 total: 915
+2021-08-20 09:04:30 INFO: test-node-gpu.js test: second instance
+2021-08-20 09:04:31 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
+2021-08-20 09:04:32 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
+2021-08-20 09:04:32 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
+2021-08-20 09:04:32 DATA: test-node-gpu.js result: performance: load: 5 total: 982
+2021-08-20 09:04:32 INFO: test-node-gpu.js test: concurrent
+2021-08-20 09:04:32 STATE: test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
+2021-08-20 09:04:32 STATE: test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
+2021-08-20 09:04:32 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
+2021-08-20 09:04:33 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
+2021-08-20 09:04:38 STATE: test-node-gpu.js passed: detect: samples/ai-face.jpg default
+2021-08-20 09:04:38 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":17}
+2021-08-20 09:04:38 DATA: test-node-gpu.js result: performance: load: 277 total: 4187
+2021-08-20 09:04:38 STATE: test-node-gpu.js passed: detect: samples/ai-face.jpg default
+2021-08-20 09:04:38 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":17}
+2021-08-20 09:04:38 DATA: test-node-gpu.js result: performance: load: 5 total: 4187
+2021-08-20 09:04:38 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
+2021-08-20 09:04:38 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
+2021-08-20 09:04:38 DATA: test-node-gpu.js result: performance: load: 277 total: 4187
+2021-08-20 09:04:38 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
+2021-08-20 09:04:38 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
+2021-08-20 09:04:38 DATA: test-node-gpu.js result: performance: load: 5 total: 4187
+2021-08-20 09:04:38 INFO: test-node-gpu.js test complete: 14218 ms
+2021-08-20 09:04:38 INFO: test-node-wasm.js start
+2021-08-20 09:04:38 ERROR: test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
+2021-08-20 09:04:38 ERROR: test-node-wasm.js aborting test
+2021-08-20 09:04:38 INFO: status: {"passed":46,"failed":1}
wiki | 2

@@ -1 +1 @@
-Subproject commit bdc4077a3df07abdf4a2d5b2d2beadf2e573e8d8
+Subproject commit c12e036ac382043f4b3a85cf71f93927af56cfe4