mirror of https://github.com/vladmandic/human
update tests and demos
parent
38ab0ada4b
commit
fc52025be6
10
CHANGELOG.md
10
CHANGELOG.md
|
@ -9,14 +9,16 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
|
||||||
|
|
||||||
## Changelog
|
## Changelog
|
||||||
|
|
||||||
### **HEAD -> main** 2021/06/01 mandic00@live.com
|
### **HEAD -> main** 2021/06/02 mandic00@live.com
|
||||||
|
|
||||||
|
- implemented drag & drop for image processing
|
||||||
|
|
||||||
|
### **origin/main** 2021/06/01 mandic00@live.com
|
||||||
|
|
||||||
|
- breaking changes to results.face output properties
|
||||||
- breaking changes to results.object output properties
|
- breaking changes to results.object output properties
|
||||||
- breaking changes to results.hand output properties
|
- breaking changes to results.hand output properties
|
||||||
- breaking changes to results.body output properties
|
- breaking changes to results.body output properties
|
||||||
|
|
||||||
### **origin/main** 2021/05/31 mandic00@live.com
|
|
||||||
|
|
||||||
- implemented human.next global interpolation method
|
- implemented human.next global interpolation method
|
||||||
- finished draw buffering and smoothing and enabled by default
|
- finished draw buffering and smoothing and enabled by default
|
||||||
- implemented service worker
|
- implemented service worker
|
||||||
|
|
|
@ -30,6 +30,7 @@ Check out [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) ap
|
||||||
- To start video detection, simply press *Play*
|
- To start video detection, simply press *Play*
|
||||||
- To process images, simply drag & drop in your Browser window
|
- To process images, simply drag & drop in your Browser window
|
||||||
- Note: For optimal performance, select only models you'd like to use
|
- Note: For optimal performance, select only models you'd like to use
|
||||||
|
- Note: If you have modern GPU, WebGL (default) backend is preferred, otherwise select WASM backend
|
||||||
|
|
||||||
<br>
|
<br>
|
||||||
|
|
||||||
|
|
File diff suppressed because one or more lines are too long
|
@ -183,44 +183,36 @@ async function main() {
|
||||||
// pre-load human models
|
// pre-load human models
|
||||||
await human.load();
|
await human.load();
|
||||||
|
|
||||||
let res;
|
|
||||||
let images = [];
|
let images = [];
|
||||||
let dir = [];
|
let dir = [];
|
||||||
// load face descriptor database
|
// load face descriptor database
|
||||||
await getFaceDB();
|
await getFaceDB();
|
||||||
|
|
||||||
// enumerate all sample images in /assets
|
// enumerate all sample images in /assets
|
||||||
res = await fetch('/assets');
|
const res = await fetch('/samples/groups');
|
||||||
dir = (res && res.ok) ? await res.json() : [];
|
dir = (res && res.ok) ? await res.json() : [];
|
||||||
images = images.concat(dir.filter((img) => (img.endsWith('.jpg') && img.includes('sample'))));
|
images = images.concat(dir.filter((img) => (img.endsWith('.jpg') && img.includes('sample'))));
|
||||||
// enumerate additional private test images in /private, not includded in git repository
|
|
||||||
res = await fetch('/private/me');
|
|
||||||
dir = (res && res.ok) ? await res.json() : [];
|
|
||||||
images = images.concat(dir.filter((img) => (img.endsWith('.jpg'))));
|
|
||||||
|
|
||||||
// enumerate additional error images, not includded in git repository
|
|
||||||
res = await fetch('/private/err');
|
|
||||||
dir = (res && res.ok) ? await res.json() : [];
|
|
||||||
images = images.concat(dir.filter((img) => (img.endsWith('.jpg'))));
|
|
||||||
log('Enumerated:', images.length, 'images');
|
|
||||||
|
|
||||||
// could not dynamically enumerate images so using static list
|
// could not dynamically enumerate images so using static list
|
||||||
if (images.length === 0) {
|
if (images.length === 0) {
|
||||||
images = [
|
images = [
|
||||||
'sample1.jpg',
|
'groups/group1.jpg',
|
||||||
'sample2.jpg',
|
'groups/group2.jpg',
|
||||||
'sample3.jpg',
|
'groups/group3.jpg',
|
||||||
'sample4.jpg',
|
'groups/group4.jpg',
|
||||||
'sample5.jpg',
|
'groups/group5.jpg',
|
||||||
'sample6.jpg',
|
'groups/group6.jpg',
|
||||||
'sample6.jpg',
|
'groups/group7.jpg',
|
||||||
'sample-me.jpg',
|
'groups/group8.jpg',
|
||||||
'human-sample-face.jpg',
|
'groups/group9.jpg',
|
||||||
'human-sample-upper.jpg',
|
'groups/group10.jpg',
|
||||||
'human-sample-body.jpg',
|
'groups/group11.jpg',
|
||||||
|
'groups/group12.jpg',
|
||||||
|
'groups/group13.jpg',
|
||||||
|
'groups/group14.jpg',
|
||||||
];
|
];
|
||||||
// add prefix for gitpages
|
// add prefix for gitpages
|
||||||
images = images.map((a) => `/human/assets/${a}`);
|
images = images.map((a) => `/samples/${a}`);
|
||||||
log('Adding static image list:', images.length, 'images');
|
log('Adding static image list:', images.length, 'images');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -114,7 +114,7 @@ var config = {
|
||||||
detector: {
|
detector: {
|
||||||
modelPath: "blazeface.json",
|
modelPath: "blazeface.json",
|
||||||
rotation: true,
|
rotation: true,
|
||||||
maxDetected: 5,
|
maxDetected: 15,
|
||||||
skipFrames: 15,
|
skipFrames: 15,
|
||||||
minConfidence: 0.2,
|
minConfidence: 0.2,
|
||||||
iouThreshold: 0.1,
|
iouThreshold: 0.1,
|
||||||
|
@ -4458,8 +4458,6 @@ var detectFace = async (parent, input) => {
|
||||||
delete faces[i].annotations.rightEyeIris;
|
delete faces[i].annotations.rightEyeIris;
|
||||||
}
|
}
|
||||||
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
|
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
|
||||||
if (faces[i].image)
|
|
||||||
delete faces[i].image;
|
|
||||||
faceRes.push({
|
faceRes.push({
|
||||||
...faces[i],
|
...faces[i],
|
||||||
id: i,
|
id: i,
|
||||||
|
@ -4473,6 +4471,8 @@ var detectFace = async (parent, input) => {
|
||||||
tensor: parent.config.face.detector.return ? tfjs_esm_exports.squeeze(faces[i].image) : null
|
tensor: parent.config.face.detector.return ? tfjs_esm_exports.squeeze(faces[i].image) : null
|
||||||
});
|
});
|
||||||
tfjs_esm_exports.dispose(faces[i].image);
|
tfjs_esm_exports.dispose(faces[i].image);
|
||||||
|
if (faces[i].image)
|
||||||
|
delete faces[i].image;
|
||||||
parent.analyze("End Face");
|
parent.analyze("End Face");
|
||||||
}
|
}
|
||||||
parent.analyze("End FaceMesh:");
|
parent.analyze("End FaceMesh:");
|
||||||
|
|
File diff suppressed because one or more lines are too long
|
@ -108,7 +108,7 @@ var config = {
|
||||||
detector: {
|
detector: {
|
||||||
modelPath: "blazeface.json",
|
modelPath: "blazeface.json",
|
||||||
rotation: true,
|
rotation: true,
|
||||||
maxDetected: 5,
|
maxDetected: 15,
|
||||||
skipFrames: 15,
|
skipFrames: 15,
|
||||||
minConfidence: 0.2,
|
minConfidence: 0.2,
|
||||||
iouThreshold: 0.1,
|
iouThreshold: 0.1,
|
||||||
|
@ -62394,8 +62394,6 @@ var detectFace = async (parent, input2) => {
|
||||||
delete faces[i].annotations.rightEyeIris;
|
delete faces[i].annotations.rightEyeIris;
|
||||||
}
|
}
|
||||||
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input2.shape[2] : 0;
|
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input2.shape[2] : 0;
|
||||||
if (faces[i].image)
|
|
||||||
delete faces[i].image;
|
|
||||||
faceRes.push({
|
faceRes.push({
|
||||||
...faces[i],
|
...faces[i],
|
||||||
id: i,
|
id: i,
|
||||||
|
@ -62409,6 +62407,8 @@ var detectFace = async (parent, input2) => {
|
||||||
tensor: parent.config.face.detector.return ? squeeze(faces[i].image) : null
|
tensor: parent.config.face.detector.return ? squeeze(faces[i].image) : null
|
||||||
});
|
});
|
||||||
dispose(faces[i].image);
|
dispose(faces[i].image);
|
||||||
|
if (faces[i].image)
|
||||||
|
delete faces[i].image;
|
||||||
parent.analyze("End Face");
|
parent.analyze("End Face");
|
||||||
}
|
}
|
||||||
parent.analyze("End FaceMesh:");
|
parent.analyze("End FaceMesh:");
|
||||||
|
|
File diff suppressed because one or more lines are too long
|
@ -116,7 +116,7 @@ var Human = (() => {
|
||||||
detector: {
|
detector: {
|
||||||
modelPath: "blazeface.json",
|
modelPath: "blazeface.json",
|
||||||
rotation: true,
|
rotation: true,
|
||||||
maxDetected: 5,
|
maxDetected: 15,
|
||||||
skipFrames: 15,
|
skipFrames: 15,
|
||||||
minConfidence: 0.2,
|
minConfidence: 0.2,
|
||||||
iouThreshold: 0.1,
|
iouThreshold: 0.1,
|
||||||
|
@ -62402,8 +62402,6 @@ return a / b;`;
|
||||||
delete faces[i].annotations.rightEyeIris;
|
delete faces[i].annotations.rightEyeIris;
|
||||||
}
|
}
|
||||||
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input2.shape[2] : 0;
|
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input2.shape[2] : 0;
|
||||||
if (faces[i].image)
|
|
||||||
delete faces[i].image;
|
|
||||||
faceRes.push({
|
faceRes.push({
|
||||||
...faces[i],
|
...faces[i],
|
||||||
id: i,
|
id: i,
|
||||||
|
@ -62417,6 +62415,8 @@ return a / b;`;
|
||||||
tensor: parent.config.face.detector.return ? squeeze(faces[i].image) : null
|
tensor: parent.config.face.detector.return ? squeeze(faces[i].image) : null
|
||||||
});
|
});
|
||||||
dispose(faces[i].image);
|
dispose(faces[i].image);
|
||||||
|
if (faces[i].image)
|
||||||
|
delete faces[i].image;
|
||||||
parent.analyze("End Face");
|
parent.analyze("End Face");
|
||||||
}
|
}
|
||||||
parent.analyze("End FaceMesh:");
|
parent.analyze("End FaceMesh:");
|
||||||
|
|
|
@ -156,7 +156,7 @@ var config = {
|
||||||
detector: {
|
detector: {
|
||||||
modelPath: "blazeface.json",
|
modelPath: "blazeface.json",
|
||||||
rotation: true,
|
rotation: true,
|
||||||
maxDetected: 5,
|
maxDetected: 15,
|
||||||
skipFrames: 15,
|
skipFrames: 15,
|
||||||
minConfidence: 0.2,
|
minConfidence: 0.2,
|
||||||
iouThreshold: 0.1,
|
iouThreshold: 0.1,
|
||||||
|
@ -4479,8 +4479,6 @@ var detectFace = async (parent, input) => {
|
||||||
delete faces[i].annotations.rightEyeIris;
|
delete faces[i].annotations.rightEyeIris;
|
||||||
}
|
}
|
||||||
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
|
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
|
||||||
if (faces[i].image)
|
|
||||||
delete faces[i].image;
|
|
||||||
faceRes.push({
|
faceRes.push({
|
||||||
...faces[i],
|
...faces[i],
|
||||||
id: i,
|
id: i,
|
||||||
|
@ -4494,6 +4492,8 @@ var detectFace = async (parent, input) => {
|
||||||
tensor: parent.config.face.detector.return ? tf8.squeeze(faces[i].image) : null
|
tensor: parent.config.face.detector.return ? tf8.squeeze(faces[i].image) : null
|
||||||
});
|
});
|
||||||
tf8.dispose(faces[i].image);
|
tf8.dispose(faces[i].image);
|
||||||
|
if (faces[i].image)
|
||||||
|
delete faces[i].image;
|
||||||
parent.analyze("End Face");
|
parent.analyze("End Face");
|
||||||
}
|
}
|
||||||
parent.analyze("End FaceMesh:");
|
parent.analyze("End FaceMesh:");
|
||||||
|
|
|
@ -157,7 +157,7 @@ var config = {
|
||||||
detector: {
|
detector: {
|
||||||
modelPath: "blazeface.json",
|
modelPath: "blazeface.json",
|
||||||
rotation: true,
|
rotation: true,
|
||||||
maxDetected: 5,
|
maxDetected: 15,
|
||||||
skipFrames: 15,
|
skipFrames: 15,
|
||||||
minConfidence: 0.2,
|
minConfidence: 0.2,
|
||||||
iouThreshold: 0.1,
|
iouThreshold: 0.1,
|
||||||
|
@ -4480,8 +4480,6 @@ var detectFace = async (parent, input) => {
|
||||||
delete faces[i].annotations.rightEyeIris;
|
delete faces[i].annotations.rightEyeIris;
|
||||||
}
|
}
|
||||||
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
|
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
|
||||||
if (faces[i].image)
|
|
||||||
delete faces[i].image;
|
|
||||||
faceRes.push({
|
faceRes.push({
|
||||||
...faces[i],
|
...faces[i],
|
||||||
id: i,
|
id: i,
|
||||||
|
@ -4495,6 +4493,8 @@ var detectFace = async (parent, input) => {
|
||||||
tensor: parent.config.face.detector.return ? tf8.squeeze(faces[i].image) : null
|
tensor: parent.config.face.detector.return ? tf8.squeeze(faces[i].image) : null
|
||||||
});
|
});
|
||||||
tf8.dispose(faces[i].image);
|
tf8.dispose(faces[i].image);
|
||||||
|
if (faces[i].image)
|
||||||
|
delete faces[i].image;
|
||||||
parent.analyze("End Face");
|
parent.analyze("End Face");
|
||||||
}
|
}
|
||||||
parent.analyze("End FaceMesh:");
|
parent.analyze("End FaceMesh:");
|
||||||
|
|
|
@ -156,7 +156,7 @@ var config = {
|
||||||
detector: {
|
detector: {
|
||||||
modelPath: "blazeface.json",
|
modelPath: "blazeface.json",
|
||||||
rotation: true,
|
rotation: true,
|
||||||
maxDetected: 5,
|
maxDetected: 15,
|
||||||
skipFrames: 15,
|
skipFrames: 15,
|
||||||
minConfidence: 0.2,
|
minConfidence: 0.2,
|
||||||
iouThreshold: 0.1,
|
iouThreshold: 0.1,
|
||||||
|
@ -4479,8 +4479,6 @@ var detectFace = async (parent, input) => {
|
||||||
delete faces[i].annotations.rightEyeIris;
|
delete faces[i].annotations.rightEyeIris;
|
||||||
}
|
}
|
||||||
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
|
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
|
||||||
if (faces[i].image)
|
|
||||||
delete faces[i].image;
|
|
||||||
faceRes.push({
|
faceRes.push({
|
||||||
...faces[i],
|
...faces[i],
|
||||||
id: i,
|
id: i,
|
||||||
|
@ -4494,6 +4492,8 @@ var detectFace = async (parent, input) => {
|
||||||
tensor: parent.config.face.detector.return ? tf8.squeeze(faces[i].image) : null
|
tensor: parent.config.face.detector.return ? tf8.squeeze(faces[i].image) : null
|
||||||
});
|
});
|
||||||
tf8.dispose(faces[i].image);
|
tf8.dispose(faces[i].image);
|
||||||
|
if (faces[i].image)
|
||||||
|
delete faces[i].image;
|
||||||
parent.analyze("End Face");
|
parent.analyze("End Face");
|
||||||
}
|
}
|
||||||
parent.analyze("End FaceMesh:");
|
parent.analyze("End FaceMesh:");
|
||||||
|
|
|
@ -0,0 +1,4 @@
|
||||||
|
# Human Library: Sample Images
|
||||||
|
|
||||||
|
Sample Images used by `Human` library demos and automated tests
|
||||||
|
Not required for normal funcioning of library
|
|
@ -4,3 +4,5 @@ For details see Wiki:
|
||||||
|
|
||||||
- [**Build Process**](https://github.com/vladmandic/human/wiki/Build-Process)
|
- [**Build Process**](https://github.com/vladmandic/human/wiki/Build-Process)
|
||||||
- [**Development Server**](https://github.com/vladmandic/human/wiki/Development-Server)
|
- [**Development Server**](https://github.com/vladmandic/human/wiki/Development-Server)
|
||||||
|
|
||||||
|
Not required for normal funcioning of library
|
||||||
|
|
|
@ -1,17 +1,17 @@
|
||||||
2021-06-01 08:56:45 [36mINFO: [39m @vladmandic/human version 2.0.0
|
2021-06-02 12:45:19 [36mINFO: [39m @vladmandic/human version 2.0.0
|
||||||
2021-06-01 08:56:45 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v16.0.0
|
2021-06-02 12:45:19 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v16.0.0
|
||||||
2021-06-01 08:56:45 [36mINFO: [39m Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
|
2021-06-02 12:45:19 [36mINFO: [39m Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
|
||||||
2021-06-01 08:56:45 [35mSTATE:[39m Build for: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
|
2021-06-02 12:45:19 [35mSTATE:[39m Build for: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
|
||||||
2021-06-01 08:56:45 [35mSTATE:[39m Build for: node type: node: {"imports":40,"importBytes":418566,"outputBytes":371338,"outputFiles":"dist/human.node.js"}
|
2021-06-02 12:45:19 [35mSTATE:[39m Build for: node type: node: {"imports":40,"importBytes":418563,"outputBytes":371336,"outputFiles":"dist/human.node.js"}
|
||||||
2021-06-01 08:56:45 [35mSTATE:[39m Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
|
2021-06-02 12:45:19 [35mSTATE:[39m Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
|
||||||
2021-06-01 08:56:46 [35mSTATE:[39m Build for: nodeGPU type: node: {"imports":40,"importBytes":418574,"outputBytes":371342,"outputFiles":"dist/human.node-gpu.js"}
|
2021-06-02 12:45:20 [35mSTATE:[39m Build for: nodeGPU type: node: {"imports":40,"importBytes":418571,"outputBytes":371340,"outputFiles":"dist/human.node-gpu.js"}
|
||||||
2021-06-01 08:56:46 [35mSTATE:[39m Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
|
2021-06-02 12:45:20 [35mSTATE:[39m Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
|
||||||
2021-06-01 08:56:46 [35mSTATE:[39m Build for: nodeWASM type: node: {"imports":40,"importBytes":418641,"outputBytes":371414,"outputFiles":"dist/human.node-wasm.js"}
|
2021-06-02 12:45:20 [35mSTATE:[39m Build for: nodeWASM type: node: {"imports":40,"importBytes":418638,"outputBytes":371412,"outputFiles":"dist/human.node-wasm.js"}
|
||||||
2021-06-01 08:56:46 [35mSTATE:[39m Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2478,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
|
2021-06-02 12:45:20 [35mSTATE:[39m Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2478,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
|
||||||
2021-06-01 08:56:46 [35mSTATE:[39m Build for: browserNoBundle type: esm: {"imports":40,"importBytes":418668,"outputBytes":244745,"outputFiles":"dist/human.esm-nobundle.js"}
|
2021-06-02 12:45:20 [35mSTATE:[39m Build for: browserNoBundle type: esm: {"imports":40,"importBytes":418665,"outputBytes":244744,"outputFiles":"dist/human.esm-nobundle.js"}
|
||||||
2021-06-01 08:56:46 [35mSTATE:[39m Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2478,"outputBytes":1111418,"outputFiles":"dist/tfjs.esm.js"}
|
2021-06-02 12:45:20 [35mSTATE:[39m Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2478,"outputBytes":1111418,"outputFiles":"dist/tfjs.esm.js"}
|
||||||
2021-06-01 08:56:47 [35mSTATE:[39m Build for: browserBundle type: iife: {"imports":40,"importBytes":1528692,"outputBytes":1352544,"outputFiles":"dist/human.js"}
|
2021-06-02 12:45:21 [35mSTATE:[39m Build for: browserBundle type: iife: {"imports":40,"importBytes":1528689,"outputBytes":1352543,"outputFiles":"dist/human.js"}
|
||||||
2021-06-01 08:56:47 [35mSTATE:[39m Build for: browserBundle type: esm: {"imports":40,"importBytes":1528692,"outputBytes":1352536,"outputFiles":"dist/human.esm.js"}
|
2021-06-02 12:45:21 [35mSTATE:[39m Build for: browserBundle type: esm: {"imports":40,"importBytes":1528689,"outputBytes":1352535,"outputFiles":"dist/human.esm.js"}
|
||||||
2021-06-01 08:56:47 [36mINFO: [39m Generate types: ["src/human.ts"]
|
2021-06-02 12:45:21 [36mINFO: [39m Generate types: ["src/human.ts"]
|
||||||
2021-06-01 08:56:52 [36mINFO: [39m Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
|
2021-06-02 12:45:26 [36mINFO: [39m Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
|
||||||
2021-06-01 08:56:52 [36mINFO: [39m Generate TypeDocs: ["src/human.ts"]
|
2021-06-02 12:45:26 [36mINFO: [39m Generate TypeDocs: ["src/human.ts"]
|
||||||
|
|
|
@ -244,7 +244,7 @@ const config: Config = {
|
||||||
rotation: true, // use best-guess rotated face image or just box with rotation as-is
|
rotation: true, // use best-guess rotated face image or just box with rotation as-is
|
||||||
// false means higher performance, but incorrect mesh mapping if face angle is above 20 degrees
|
// false means higher performance, but incorrect mesh mapping if face angle is above 20 degrees
|
||||||
// this parameter is not valid in nodejs
|
// this parameter is not valid in nodejs
|
||||||
maxDetected: 5, // maximum number of faces detected in the input
|
maxDetected: 15, // maximum number of faces detected in the input
|
||||||
// should be set to the minimum number for performance
|
// should be set to the minimum number for performance
|
||||||
skipFrames: 15, // how many max frames to go without re-running the face bounding box detector
|
skipFrames: 15, // how many max frames to go without re-running the face bounding box detector
|
||||||
// only used when cacheSensitivity is not zero
|
// only used when cacheSensitivity is not zero
|
||||||
|
|
|
@ -211,7 +211,6 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
|
||||||
: 0;
|
: 0;
|
||||||
|
|
||||||
// combine results
|
// combine results
|
||||||
if (faces[i].image) delete faces[i].image;
|
|
||||||
faceRes.push({
|
faceRes.push({
|
||||||
...faces[i],
|
...faces[i],
|
||||||
id: i,
|
id: i,
|
||||||
|
@ -226,6 +225,8 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
|
||||||
});
|
});
|
||||||
// dispose original face tensor
|
// dispose original face tensor
|
||||||
tf.dispose(faces[i].image);
|
tf.dispose(faces[i].image);
|
||||||
|
// delete temp face image
|
||||||
|
if (faces[i].image) delete faces[i].image;
|
||||||
|
|
||||||
parent.analyze('End Face');
|
parent.analyze('End Face');
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,6 +2,8 @@
|
||||||
|
|
||||||
## Automatic Tests
|
## Automatic Tests
|
||||||
|
|
||||||
|
Not required for normal funcioning of library
|
||||||
|
|
||||||
### NodeJS using TensorFlow library
|
### NodeJS using TensorFlow library
|
||||||
|
|
||||||
- Image filters are disabled due to lack of Canvas and WeBGL access
|
- Image filters are disabled due to lack of Canvas and WeBGL access
|
||||||
|
|
|
@ -140,22 +140,22 @@ async function test(Human, inputConfig) {
|
||||||
|
|
||||||
log('info', 'test body variants');
|
log('info', 'test body variants');
|
||||||
config.body = { modelPath: 'posenet.json', enabled: true };
|
config.body = { modelPath: 'posenet.json', enabled: true };
|
||||||
await testDetect(human, 'assets/human-sample-body.jpg', 'posenet');
|
await testDetect(human, 'samples/ai-body.jpg', 'posenet');
|
||||||
config.body = { modelPath: 'movenet-lightning.json', enabled: true };
|
config.body = { modelPath: 'movenet-lightning.json', enabled: true };
|
||||||
await testDetect(human, 'assets/human-sample-body.jpg', 'movenet');
|
await testDetect(human, 'samples/ai-body.jpg', 'movenet');
|
||||||
|
|
||||||
await testDetect(human, null, 'default');
|
await testDetect(human, null, 'default');
|
||||||
log('info', 'test: first instance');
|
log('info', 'test: first instance');
|
||||||
await testDetect(human, 'assets/sample-me.jpg', 'default');
|
await testDetect(human, 'samples/ai-upper.jpg', 'default');
|
||||||
log('info', 'test: second instance');
|
log('info', 'test: second instance');
|
||||||
const second = new Human(config);
|
const second = new Human(config);
|
||||||
await testDetect(second, 'assets/sample-me.jpg', 'default');
|
await testDetect(second, 'samples/ai-upper.jpg', 'default');
|
||||||
log('info', 'test: concurrent');
|
log('info', 'test: concurrent');
|
||||||
await Promise.all([
|
await Promise.all([
|
||||||
testDetect(human, 'assets/human-sample-face.jpg', 'default'),
|
testDetect(human, 'samples/ai-face.jpg', 'default'),
|
||||||
testDetect(second, 'assets/human-sample-face.jpg', 'default'),
|
testDetect(second, 'samples/ai-face.jpg', 'default'),
|
||||||
testDetect(human, 'assets/human-sample-body.jpg', 'default'),
|
testDetect(human, 'samples/ai-body.jpg', 'default'),
|
||||||
testDetect(second, 'assets/human-sample-body.jpg', 'default'),
|
testDetect(second, 'samples/ai-body.jpg', 'default'),
|
||||||
]);
|
]);
|
||||||
const t1 = process.hrtime.bigint();
|
const t1 = process.hrtime.bigint();
|
||||||
log('info', 'test complete:', Math.trunc(Number(t1 - t0) / 1000 / 1000), 'ms');
|
log('info', 'test complete:', Math.trunc(Number(t1 - t0) / 1000 / 1000), 'ms');
|
||||||
|
|
338
test/test.log
338
test/test.log
|
@ -1,169 +1,169 @@
|
||||||
2021-06-01 08:57:45 [36mINFO: [39m @vladmandic/human version 2.0.0
|
2021-06-02 13:20:17 [36mINFO: [39m @vladmandic/human version 2.0.0
|
||||||
2021-06-01 08:57:45 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v16.0.0
|
2021-06-02 13:20:17 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v16.0.0
|
||||||
2021-06-01 08:57:45 [36mINFO: [39m tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
|
2021-06-02 13:20:17 [36mINFO: [39m tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
|
||||||
2021-06-01 08:57:45 [36mINFO: [39m test-node.js start
|
2021-06-02 13:20:17 [36mINFO: [39m test-node.js start
|
||||||
2021-06-01 08:57:45 [35mSTATE:[39m test-node.js passed: create human
|
2021-06-02 13:20:18 [35mSTATE:[39m test-node.js passed: create human
|
||||||
2021-06-01 08:57:45 [36mINFO: [39m test-node.js human version: 2.0.0
|
2021-06-02 13:20:18 [36mINFO: [39m test-node.js human version: 2.0.0
|
||||||
2021-06-01 08:57:45 [36mINFO: [39m test-node.js platform: linux x64 agent: NodeJS v16.0.0
|
2021-06-02 13:20:18 [36mINFO: [39m test-node.js platform: linux x64 agent: NodeJS v16.0.0
|
||||||
2021-06-01 08:57:45 [36mINFO: [39m test-node.js tfjs version: 3.6.0
|
2021-06-02 13:20:18 [36mINFO: [39m test-node.js tfjs version: 3.6.0
|
||||||
2021-06-01 08:57:46 [35mSTATE:[39m test-node.js passed: set backend: tensorflow
|
2021-06-02 13:20:18 [35mSTATE:[39m test-node.js passed: set backend: tensorflow
|
||||||
2021-06-01 08:57:46 [35mSTATE:[39m test-node.js passed: load models
|
2021-06-02 13:20:18 [35mSTATE:[39m test-node.js passed: load models
|
||||||
2021-06-01 08:57:46 [35mSTATE:[39m test-node.js result: defined models: 13 loaded models: 6
|
2021-06-02 13:20:18 [35mSTATE:[39m test-node.js result: defined models: 13 loaded models: 6
|
||||||
2021-06-01 08:57:46 [35mSTATE:[39m test-node.js passed: warmup: none default
|
2021-06-02 13:20:18 [35mSTATE:[39m test-node.js passed: warmup: none default
|
||||||
2021-06-01 08:57:47 [35mSTATE:[39m test-node.js passed: warmup: face default
|
2021-06-02 13:20:20 [35mSTATE:[39m test-node.js passed: warmup: face default
|
||||||
2021-06-01 08:57:47 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
|
2021-06-02 13:20:20 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
|
||||||
2021-06-01 08:57:47 [32mDATA: [39m test-node.js result: performance: load: 334 total: 1463
|
2021-06-02 13:20:20 [32mDATA: [39m test-node.js result: performance: load: 332 total: 1557
|
||||||
2021-06-01 08:57:49 [35mSTATE:[39m test-node.js passed: warmup: body default
|
2021-06-02 13:20:21 [35mSTATE:[39m test-node.js passed: warmup: body default
|
||||||
2021-06-01 08:57:49 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
2021-06-02 13:20:21 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
||||||
2021-06-01 08:57:49 [32mDATA: [39m test-node.js result: performance: load: 334 total: 1665
|
2021-06-02 13:20:21 [32mDATA: [39m test-node.js result: performance: load: 332 total: 1589
|
||||||
2021-06-01 08:57:49 [36mINFO: [39m test-node.js test body variants
|
2021-06-02 13:20:21 [36mINFO: [39m test-node.js test body variants
|
||||||
2021-06-01 08:57:50 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
2021-06-02 13:20:23 [35mSTATE:[39m test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||||
2021-06-01 08:57:51 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-body.jpg posenet
|
2021-06-02 13:20:24 [35mSTATE:[39m test-node.js passed: detect: samples/ai-body.jpg posenet
|
||||||
2021-06-01 08:57:51 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
|
2021-06-02 13:20:24 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
|
||||||
2021-06-01 08:57:51 [32mDATA: [39m test-node.js result: performance: load: 334 total: 1015
|
2021-06-02 13:20:24 [32mDATA: [39m test-node.js result: performance: load: 332 total: 1011
|
||||||
2021-06-01 08:57:52 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
2021-06-02 13:20:25 [35mSTATE:[39m test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||||
2021-06-01 08:57:52 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-body.jpg movenet
|
2021-06-02 13:20:25 [35mSTATE:[39m test-node.js passed: detect: samples/ai-body.jpg movenet
|
||||||
2021-06-01 08:57:52 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
2021-06-02 13:20:25 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
||||||
2021-06-01 08:57:52 [32mDATA: [39m test-node.js result: performance: load: 334 total: 376
|
2021-06-02 13:20:25 [32mDATA: [39m test-node.js result: performance: load: 332 total: 329
|
||||||
2021-06-01 08:57:53 [35mSTATE:[39m test-node.js passed: detect: random default
|
2021-06-02 13:20:26 [35mSTATE:[39m test-node.js passed: detect: random default
|
||||||
2021-06-01 08:57:53 [32mDATA: [39m test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
|
2021-06-02 13:20:26 [32mDATA: [39m test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
|
||||||
2021-06-01 08:57:53 [32mDATA: [39m test-node.js result: performance: load: 334 total: 882
|
2021-06-02 13:20:26 [32mDATA: [39m test-node.js result: performance: load: 332 total: 846
|
||||||
2021-06-01 08:57:53 [36mINFO: [39m test-node.js test: first instance
|
2021-06-02 13:20:26 [36mINFO: [39m test-node.js test: first instance
|
||||||
2021-06-01 08:57:53 [35mSTATE:[39m test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
|
2021-06-02 13:20:26 [35mSTATE:[39m test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||||
2021-06-01 08:57:55 [35mSTATE:[39m test-node.js passed: detect: assets/sample-me.jpg default
|
2021-06-02 13:20:28 [35mSTATE:[39m test-node.js passed: detect: samples/ai-upper.jpg default
|
||||||
2021-06-01 08:57:55 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"age":39.2,"gender":"male"} {"score":0.68,"class":"person"} {"score":0.67,"keypoints":7}
|
2021-06-02 13:20:28 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
|
||||||
2021-06-01 08:57:55 [32mDATA: [39m test-node.js result: performance: load: 334 total: 1387
|
2021-06-02 13:20:28 [32mDATA: [39m test-node.js result: performance: load: 332 total: 1373
|
||||||
2021-06-01 08:57:55 [36mINFO: [39m test-node.js test: second instance
|
2021-06-02 13:20:28 [36mINFO: [39m test-node.js test: second instance
|
||||||
2021-06-01 08:57:55 [35mSTATE:[39m test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
|
2021-06-02 13:20:28 [35mSTATE:[39m test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||||
2021-06-01 08:57:57 [35mSTATE:[39m test-node.js passed: detect: assets/sample-me.jpg default
|
2021-06-02 13:20:29 [35mSTATE:[39m test-node.js passed: detect: samples/ai-upper.jpg default
|
||||||
2021-06-01 08:57:57 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"age":39.2,"gender":"male"} {"score":0.68,"class":"person"} {"score":0.67,"keypoints":7}
|
2021-06-02 13:20:29 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
|
||||||
2021-06-01 08:57:57 [32mDATA: [39m test-node.js result: performance: load: 2 total: 1339
|
2021-06-02 13:20:29 [32mDATA: [39m test-node.js result: performance: load: 5 total: 1272
|
||||||
2021-06-01 08:57:57 [36mINFO: [39m test-node.js test: concurrent
|
2021-06-02 13:20:29 [36mINFO: [39m test-node.js test: concurrent
|
||||||
2021-06-01 08:57:57 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
|
2021-06-02 13:20:29 [35mSTATE:[39m test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||||
2021-06-01 08:57:57 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
|
2021-06-02 13:20:29 [35mSTATE:[39m test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||||
2021-06-01 08:57:58 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
2021-06-02 13:20:30 [35mSTATE:[39m test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||||
2021-06-01 08:57:59 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
2021-06-02 13:20:31 [35mSTATE:[39m test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||||
2021-06-01 08:58:04 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-face.jpg default
|
2021-06-02 13:20:37 [35mSTATE:[39m test-node.js passed: detect: samples/ai-face.jpg default
|
||||||
2021-06-01 08:58:04 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
|
2021-06-02 13:20:37 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
|
||||||
2021-06-01 08:58:04 [32mDATA: [39m test-node.js result: performance: load: 334 total: 5263
|
2021-06-02 13:20:37 [32mDATA: [39m test-node.js result: performance: load: 332 total: 5496
|
||||||
2021-06-01 08:58:04 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-face.jpg default
|
2021-06-02 13:20:37 [35mSTATE:[39m test-node.js passed: detect: samples/ai-face.jpg default
|
||||||
2021-06-01 08:58:04 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
|
2021-06-02 13:20:37 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
|
||||||
2021-06-01 08:58:04 [32mDATA: [39m test-node.js result: performance: load: 2 total: 5263
|
2021-06-02 13:20:37 [32mDATA: [39m test-node.js result: performance: load: 5 total: 5496
|
||||||
2021-06-01 08:58:04 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-body.jpg default
|
2021-06-02 13:20:37 [35mSTATE:[39m test-node.js passed: detect: samples/ai-body.jpg default
|
||||||
2021-06-01 08:58:04 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
2021-06-02 13:20:37 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
||||||
2021-06-01 08:58:04 [32mDATA: [39m test-node.js result: performance: load: 334 total: 5263
|
2021-06-02 13:20:37 [32mDATA: [39m test-node.js result: performance: load: 332 total: 5496
|
||||||
2021-06-01 08:58:04 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-body.jpg default
|
2021-06-02 13:20:37 [35mSTATE:[39m test-node.js passed: detect: samples/ai-body.jpg default
|
||||||
2021-06-01 08:58:04 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
2021-06-02 13:20:37 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
||||||
2021-06-01 08:58:04 [32mDATA: [39m test-node.js result: performance: load: 2 total: 5263
|
2021-06-02 13:20:37 [32mDATA: [39m test-node.js result: performance: load: 5 total: 5496
|
||||||
2021-06-01 08:58:04 [36mINFO: [39m test-node.js test complete: 18675 ms
|
2021-06-02 13:20:37 [36mINFO: [39m test-node.js test complete: 18941 ms
|
||||||
2021-06-01 08:58:04 [36mINFO: [39m test-node-gpu.js start
|
2021-06-02 13:20:37 [36mINFO: [39m test-node-gpu.js start
|
||||||
2021-06-01 08:58:04 [33mWARN: [39m test-node-gpu.js stderr: 2021-06-01 08:58:04.942701: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
|
2021-06-02 13:20:37 [33mWARN: [39m test-node-gpu.js stderr: 2021-06-02 13:20:37.822445: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
|
||||||
2021-06-01 08:58:04 [33mWARN: [39m test-node-gpu.js stderr: 2021-06-01 08:58:04.994440: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
|
2021-06-02 13:20:37 [33mWARN: [39m test-node-gpu.js stderr: 2021-06-02 13:20:37.875354: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
|
||||||
2021-06-01 08:58:04 [33mWARN: [39m test-node-gpu.js stderr: 2021-06-01 08:58:04.994476: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
|
2021-06-02 13:20:37 [33mWARN: [39m test-node-gpu.js stderr: 2021-06-02 13:20:37.875595: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
|
||||||
2021-06-01 08:58:05 [35mSTATE:[39m test-node-gpu.js passed: create human
|
2021-06-02 13:20:37 [35mSTATE:[39m test-node-gpu.js passed: create human
|
||||||
2021-06-01 08:58:05 [36mINFO: [39m test-node-gpu.js human version: 2.0.0
|
2021-06-02 13:20:37 [36mINFO: [39m test-node-gpu.js human version: 2.0.0
|
||||||
2021-06-01 08:58:05 [36mINFO: [39m test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
|
2021-06-02 13:20:37 [36mINFO: [39m test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
|
||||||
2021-06-01 08:58:05 [36mINFO: [39m test-node-gpu.js tfjs version: 3.6.0
|
2021-06-02 13:20:37 [36mINFO: [39m test-node-gpu.js tfjs version: 3.6.0
|
||||||
2021-06-01 08:58:05 [35mSTATE:[39m test-node-gpu.js passed: set backend: tensorflow
|
2021-06-02 13:20:38 [35mSTATE:[39m test-node-gpu.js passed: set backend: tensorflow
|
||||||
2021-06-01 08:58:05 [35mSTATE:[39m test-node-gpu.js passed: load models
|
2021-06-02 13:20:38 [35mSTATE:[39m test-node-gpu.js passed: load models
|
||||||
2021-06-01 08:58:05 [35mSTATE:[39m test-node-gpu.js result: defined models: 13 loaded models: 6
|
2021-06-02 13:20:38 [35mSTATE:[39m test-node-gpu.js result: defined models: 13 loaded models: 6
|
||||||
2021-06-01 08:58:05 [35mSTATE:[39m test-node-gpu.js passed: warmup: none default
|
2021-06-02 13:20:38 [35mSTATE:[39m test-node-gpu.js passed: warmup: none default
|
||||||
2021-06-01 08:58:06 [35mSTATE:[39m test-node-gpu.js passed: warmup: face default
|
2021-06-02 13:20:40 [35mSTATE:[39m test-node-gpu.js passed: warmup: face default
|
||||||
2021-06-01 08:58:06 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
|
2021-06-02 13:20:40 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
|
||||||
2021-06-01 08:58:06 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 1468
|
2021-06-02 13:20:40 [32mDATA: [39m test-node-gpu.js result: performance: load: 328 total: 1683
|
||||||
2021-06-01 08:58:08 [35mSTATE:[39m test-node-gpu.js passed: warmup: body default
|
2021-06-02 13:20:41 [35mSTATE:[39m test-node-gpu.js passed: warmup: body default
|
||||||
2021-06-01 08:58:08 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
2021-06-02 13:20:41 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
||||||
2021-06-01 08:58:08 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 1521
|
2021-06-02 13:20:41 [32mDATA: [39m test-node-gpu.js result: performance: load: 328 total: 1535
|
||||||
2021-06-01 08:58:08 [36mINFO: [39m test-node-gpu.js test body variants
|
2021-06-02 13:20:41 [36mINFO: [39m test-node-gpu.js test body variants
|
||||||
2021-06-01 08:58:09 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
2021-06-02 13:20:42 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||||
2021-06-01 08:58:10 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-body.jpg posenet
|
2021-06-02 13:20:43 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-body.jpg posenet
|
||||||
2021-06-01 08:58:10 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
|
2021-06-02 13:20:43 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
|
||||||
2021-06-01 08:58:10 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 977
|
2021-06-02 13:20:43 [32mDATA: [39m test-node-gpu.js result: performance: load: 328 total: 977
|
||||||
2021-06-01 08:58:11 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
2021-06-02 13:20:44 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||||
2021-06-01 08:58:11 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-body.jpg movenet
|
2021-06-02 13:20:44 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-body.jpg movenet
|
||||||
2021-06-01 08:58:11 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
2021-06-02 13:20:44 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
||||||
2021-06-01 08:58:11 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 270
|
2021-06-02 13:20:44 [32mDATA: [39m test-node-gpu.js result: performance: load: 328 total: 336
|
||||||
2021-06-01 08:58:12 [35mSTATE:[39m test-node-gpu.js passed: detect: random default
|
2021-06-02 13:20:45 [35mSTATE:[39m test-node-gpu.js passed: detect: random default
|
||||||
2021-06-01 08:58:12 [32mDATA: [39m test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0,"keypoints":0}
|
2021-06-02 13:20:45 [32mDATA: [39m test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0,"keypoints":0}
|
||||||
2021-06-01 08:58:12 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 167
|
2021-06-02 13:20:45 [32mDATA: [39m test-node-gpu.js result: performance: load: 328 total: 181
|
||||||
2021-06-01 08:58:12 [36mINFO: [39m test-node-gpu.js test: first instance
|
2021-06-02 13:20:45 [36mINFO: [39m test-node-gpu.js test: first instance
|
||||||
2021-06-01 08:58:12 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
|
2021-06-02 13:20:45 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||||
2021-06-01 08:58:12 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/sample-me.jpg default
|
2021-06-02 13:20:45 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-upper.jpg default
|
||||||
2021-06-01 08:58:12 [32mDATA: [39m test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0.67,"keypoints":7}
|
2021-06-02 13:20:45 [32mDATA: [39m test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0.78,"keypoints":7}
|
||||||
2021-06-01 08:58:12 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 93
|
2021-06-02 13:20:45 [32mDATA: [39m test-node-gpu.js result: performance: load: 328 total: 121
|
||||||
2021-06-01 08:58:12 [36mINFO: [39m test-node-gpu.js test: second instance
|
2021-06-02 13:20:45 [36mINFO: [39m test-node-gpu.js test: second instance
|
||||||
2021-06-01 08:58:12 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
|
2021-06-02 13:20:46 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||||
2021-06-01 08:58:14 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/sample-me.jpg default
|
2021-06-02 13:20:47 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-upper.jpg default
|
||||||
2021-06-01 08:58:14 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"age":39.2,"gender":"male"} {"score":0.68,"class":"person"} {"score":0.67,"keypoints":7}
|
2021-06-02 13:20:47 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
|
||||||
2021-06-01 08:58:14 [32mDATA: [39m test-node-gpu.js result: performance: load: 5 total: 1460
|
2021-06-02 13:20:47 [32mDATA: [39m test-node-gpu.js result: performance: load: 4 total: 1327
|
||||||
2021-06-01 08:58:14 [36mINFO: [39m test-node-gpu.js test: concurrent
|
2021-06-02 13:20:47 [36mINFO: [39m test-node-gpu.js test: concurrent
|
||||||
2021-06-01 08:58:14 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
|
2021-06-02 13:20:47 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||||
2021-06-01 08:58:14 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
|
2021-06-02 13:20:47 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||||
2021-06-01 08:58:15 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
2021-06-02 13:20:48 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||||
2021-06-01 08:58:16 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
2021-06-02 13:20:49 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||||
2021-06-01 08:58:21 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
|
2021-06-02 13:20:55 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-face.jpg default
|
||||||
2021-06-01 08:58:21 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
|
2021-06-02 13:20:55 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
|
||||||
2021-06-01 08:58:21 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 5282
|
2021-06-02 13:20:55 [32mDATA: [39m test-node-gpu.js result: performance: load: 328 total: 5502
|
||||||
2021-06-01 08:58:21 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
|
2021-06-02 13:20:55 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-face.jpg default
|
||||||
2021-06-01 08:58:21 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
|
2021-06-02 13:20:55 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
|
||||||
2021-06-01 08:58:21 [32mDATA: [39m test-node-gpu.js result: performance: load: 5 total: 5282
|
2021-06-02 13:20:55 [32mDATA: [39m test-node-gpu.js result: performance: load: 4 total: 5502
|
||||||
2021-06-01 08:58:21 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
|
2021-06-02 13:20:55 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-body.jpg default
|
||||||
2021-06-01 08:58:21 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
2021-06-02 13:20:55 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
||||||
2021-06-01 08:58:21 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 5282
|
2021-06-02 13:20:55 [32mDATA: [39m test-node-gpu.js result: performance: load: 328 total: 5502
|
||||||
2021-06-01 08:58:21 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
|
2021-06-02 13:20:55 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-body.jpg default
|
||||||
2021-06-01 08:58:21 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
2021-06-02 13:20:55 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
||||||
2021-06-01 08:58:21 [32mDATA: [39m test-node-gpu.js result: performance: load: 5 total: 5282
|
2021-06-02 13:20:55 [32mDATA: [39m test-node-gpu.js result: performance: load: 4 total: 5502
|
||||||
2021-06-01 08:58:21 [36mINFO: [39m test-node-gpu.js test complete: 16736 ms
|
2021-06-02 13:20:55 [36mINFO: [39m test-node-gpu.js test complete: 17165 ms
|
||||||
2021-06-01 08:58:21 [36mINFO: [39m test-node-wasm.js start
|
2021-06-02 13:20:55 [36mINFO: [39m test-node-wasm.js start
|
||||||
2021-06-01 08:58:22 [35mSTATE:[39m test-node-wasm.js passed: model server: http://localhost:10030/models/
|
2021-06-02 13:20:55 [35mSTATE:[39m test-node-wasm.js passed: model server: http://localhost:10030/models/
|
||||||
2021-06-01 08:58:22 [35mSTATE:[39m test-node-wasm.js passed: create human
|
2021-06-02 13:20:55 [35mSTATE:[39m test-node-wasm.js passed: create human
|
||||||
2021-06-01 08:58:22 [36mINFO: [39m test-node-wasm.js human version: 2.0.0
|
2021-06-02 13:20:55 [36mINFO: [39m test-node-wasm.js human version: 2.0.0
|
||||||
2021-06-01 08:58:22 [36mINFO: [39m test-node-wasm.js platform: linux x64 agent: NodeJS v16.0.0
|
2021-06-02 13:20:55 [36mINFO: [39m test-node-wasm.js platform: linux x64 agent: NodeJS v16.0.0
|
||||||
2021-06-01 08:58:22 [36mINFO: [39m test-node-wasm.js tfjs version: 3.6.0
|
2021-06-02 13:20:55 [36mINFO: [39m test-node-wasm.js tfjs version: 3.6.0
|
||||||
2021-06-01 08:58:22 [35mSTATE:[39m test-node-wasm.js passed: set backend: wasm
|
2021-06-02 13:20:56 [35mSTATE:[39m test-node-wasm.js passed: set backend: wasm
|
||||||
2021-06-01 08:58:22 [35mSTATE:[39m test-node-wasm.js passed: load models
|
2021-06-02 13:20:56 [35mSTATE:[39m test-node-wasm.js passed: load models
|
||||||
2021-06-01 08:58:22 [35mSTATE:[39m test-node-wasm.js result: defined models: 13 loaded models: 5
|
2021-06-02 13:20:56 [35mSTATE:[39m test-node-wasm.js result: defined models: 13 loaded models: 5
|
||||||
2021-06-01 08:58:22 [35mSTATE:[39m test-node-wasm.js passed: warmup: none default
|
2021-06-02 13:20:56 [35mSTATE:[39m test-node-wasm.js passed: warmup: none default
|
||||||
2021-06-01 08:58:22 [31mERROR:[39m test-node-wasm.js failed: warmup: face default
|
2021-06-02 13:20:56 [31mERROR:[39m test-node-wasm.js failed: warmup: face default
|
||||||
2021-06-01 08:58:22 [31mERROR:[39m test-node-wasm.js failed: warmup: body default
|
2021-06-02 13:20:56 [31mERROR:[39m test-node-wasm.js failed: warmup: body default
|
||||||
2021-06-01 08:58:22 [36mINFO: [39m test-node-wasm.js test body variants
|
2021-06-02 13:20:56 [36mINFO: [39m test-node-wasm.js test body variants
|
||||||
2021-06-01 08:58:24 [35mSTATE:[39m test-node-wasm.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
2021-06-02 13:20:58 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||||
2021-06-01 08:58:28 [35mSTATE:[39m test-node-wasm.js passed: detect: assets/human-sample-body.jpg posenet
|
2021-06-02 13:21:01 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-body.jpg posenet
|
||||||
2021-06-01 08:58:28 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"age":28.5,"gender":"female"} {} {"score":0.96,"keypoints":16}
|
2021-06-02 13:21:01 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"age":28.5,"gender":"female"} {} {"score":0.96,"keypoints":16}
|
||||||
2021-06-01 08:58:28 [32mDATA: [39m test-node-wasm.js result: performance: load: 666 total: 3243
|
2021-06-02 13:21:01 [32mDATA: [39m test-node-wasm.js result: performance: load: 698 total: 3201
|
||||||
2021-06-01 08:58:30 [35mSTATE:[39m test-node-wasm.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
2021-06-02 13:21:03 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||||
2021-06-01 08:58:32 [35mSTATE:[39m test-node-wasm.js passed: detect: assets/human-sample-body.jpg movenet
|
2021-06-02 13:21:05 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-body.jpg movenet
|
||||||
2021-06-01 08:58:32 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"age":28.5,"gender":"female"} {} {"score":0.93,"keypoints":17}
|
2021-06-02 13:21:05 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"age":28.5,"gender":"female"} {} {"score":0.93,"keypoints":17}
|
||||||
2021-06-01 08:58:32 [32mDATA: [39m test-node-wasm.js result: performance: load: 666 total: 1979
|
2021-06-02 13:21:05 [32mDATA: [39m test-node-wasm.js result: performance: load: 698 total: 1905
|
||||||
2021-06-01 08:58:32 [35mSTATE:[39m test-node-wasm.js passed: detect: random default
|
2021-06-02 13:21:05 [35mSTATE:[39m test-node-wasm.js passed: detect: random default
|
||||||
2021-06-01 08:58:32 [32mDATA: [39m test-node-wasm.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
|
2021-06-02 13:21:05 [32mDATA: [39m test-node-wasm.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
|
||||||
2021-06-01 08:58:32 [32mDATA: [39m test-node-wasm.js result: performance: load: 666 total: 232
|
2021-06-02 13:21:05 [32mDATA: [39m test-node-wasm.js result: performance: load: 698 total: 232
|
||||||
2021-06-01 08:58:32 [36mINFO: [39m test-node-wasm.js test: first instance
|
2021-06-02 13:21:05 [36mINFO: [39m test-node-wasm.js test: first instance
|
||||||
2021-06-01 08:58:33 [35mSTATE:[39m test-node-wasm.js passed: load image: assets/sample-me.jpg [1,700,700,3]
|
2021-06-02 13:21:06 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||||
2021-06-01 08:58:33 [35mSTATE:[39m test-node-wasm.js passed: detect: assets/sample-me.jpg default
|
2021-06-02 13:21:06 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-upper.jpg default
|
||||||
2021-06-01 08:58:33 [32mDATA: [39m test-node-wasm.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":0.67,"keypoints":7}
|
2021-06-02 13:21:06 [32mDATA: [39m test-node-wasm.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":0.78,"keypoints":7}
|
||||||
2021-06-01 08:58:33 [32mDATA: [39m test-node-wasm.js result: performance: load: 666 total: 239
|
2021-06-02 13:21:06 [32mDATA: [39m test-node-wasm.js result: performance: load: 698 total: 236
|
||||||
2021-06-01 08:58:33 [36mINFO: [39m test-node-wasm.js test: second instance
|
2021-06-02 13:21:06 [36mINFO: [39m test-node-wasm.js test: second instance
|
||||||
2021-06-01 08:58:33 [35mSTATE:[39m test-node-wasm.js passed: load image: assets/sample-me.jpg [1,700,700,3]
|
2021-06-02 13:21:07 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||||
2021-06-01 08:58:36 [35mSTATE:[39m test-node-wasm.js passed: detect: assets/sample-me.jpg default
|
2021-06-02 13:21:09 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-upper.jpg default
|
||||||
2021-06-01 08:58:36 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 0 person: 1 {"age":39.2,"gender":"male"} {} {"score":0.67,"keypoints":7}
|
2021-06-02 13:21:09 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"age":29.5,"gender":"female"} {} {"score":0.78,"keypoints":7}
|
||||||
2021-06-01 08:58:36 [32mDATA: [39m test-node-wasm.js result: performance: load: 4 total: 2370
|
2021-06-02 13:21:09 [32mDATA: [39m test-node-wasm.js result: performance: load: 5 total: 2340
|
||||||
2021-06-01 08:58:36 [36mINFO: [39m test-node-wasm.js test: concurrent
|
2021-06-02 13:21:09 [36mINFO: [39m test-node-wasm.js test: concurrent
|
||||||
2021-06-01 08:58:36 [35mSTATE:[39m test-node-wasm.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
|
2021-06-02 13:21:09 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||||
2021-06-01 08:58:36 [35mSTATE:[39m test-node-wasm.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
|
2021-06-02 13:21:09 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||||
2021-06-01 08:58:38 [35mSTATE:[39m test-node-wasm.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
2021-06-02 13:21:11 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||||
2021-06-01 08:58:40 [35mSTATE:[39m test-node-wasm.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
2021-06-02 13:21:13 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||||
2021-06-01 08:58:49 [35mSTATE:[39m test-node-wasm.js passed: detect: assets/human-sample-face.jpg default
|
2021-06-02 13:21:22 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-face.jpg default
|
||||||
2021-06-01 08:58:49 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 0 person: 1 {"age":23.6,"gender":"female"} {} {"score":0.73,"keypoints":17}
|
2021-06-02 13:21:22 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 0 person: 1 {"age":23.6,"gender":"female"} {} {"score":0.73,"keypoints":17}
|
||||||
2021-06-01 08:58:49 [32mDATA: [39m test-node-wasm.js result: performance: load: 666 total: 9077
|
2021-06-02 13:21:22 [32mDATA: [39m test-node-wasm.js result: performance: load: 698 total: 9110
|
||||||
2021-06-01 08:58:49 [35mSTATE:[39m test-node-wasm.js passed: detect: assets/human-sample-face.jpg default
|
2021-06-02 13:21:22 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-face.jpg default
|
||||||
2021-06-01 08:58:49 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 0 person: 1 {"age":23.6,"gender":"female"} {} {"score":0.73,"keypoints":17}
|
2021-06-02 13:21:22 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 0 person: 1 {"age":23.6,"gender":"female"} {} {"score":0.73,"keypoints":17}
|
||||||
2021-06-01 08:58:49 [32mDATA: [39m test-node-wasm.js result: performance: load: 4 total: 9077
|
2021-06-02 13:21:22 [32mDATA: [39m test-node-wasm.js result: performance: load: 5 total: 9110
|
||||||
2021-06-01 08:58:49 [35mSTATE:[39m test-node-wasm.js passed: detect: assets/human-sample-body.jpg default
|
2021-06-02 13:21:22 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-body.jpg default
|
||||||
2021-06-01 08:58:49 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"age":28.5,"gender":"female"} {} {"score":0.93,"keypoints":17}
|
2021-06-02 13:21:22 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"age":28.5,"gender":"female"} {} {"score":0.93,"keypoints":17}
|
||||||
2021-06-01 08:58:49 [32mDATA: [39m test-node-wasm.js result: performance: load: 666 total: 9077
|
2021-06-02 13:21:22 [32mDATA: [39m test-node-wasm.js result: performance: load: 698 total: 9110
|
||||||
2021-06-01 08:58:49 [35mSTATE:[39m test-node-wasm.js passed: detect: assets/human-sample-body.jpg default
|
2021-06-02 13:21:22 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-body.jpg default
|
||||||
2021-06-01 08:58:49 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"age":28.5,"gender":"female"} {} {"score":0.93,"keypoints":17}
|
2021-06-02 13:21:22 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"age":28.5,"gender":"female"} {} {"score":0.93,"keypoints":17}
|
||||||
2021-06-01 08:58:49 [32mDATA: [39m test-node-wasm.js result: performance: load: 4 total: 9077
|
2021-06-02 13:21:22 [32mDATA: [39m test-node-wasm.js result: performance: load: 5 total: 9110
|
||||||
2021-06-01 08:58:49 [36mINFO: [39m test-node-wasm.js test complete: 27096 ms
|
2021-06-02 13:21:22 [36mINFO: [39m test-node-wasm.js test complete: 26992 ms
|
||||||
2021-06-01 08:58:49 [36mINFO: [39m status: {"passed":68,"failed":2}
|
2021-06-02 13:21:22 [36mINFO: [39m status: {"passed":68,"failed":2}
|
||||||
|
|
|
@ -0,0 +1,4 @@
|
||||||
|
# Human Library: TypeDoc API Specification
|
||||||
|
|
||||||
|
Automatically generated API specification for `Human` library
|
||||||
|
Not required for normal functioning of the library
|
Loading…
Reference in New Issue