mirror of https://github.com/vladmandic/human

add named exports

parent: a0563a3b91
commit: 4b6a25f748
CHANGELOG.md

@@ -9,7 +9,7 @@
 ## Changelog

-### **HEAD -> main** 2022/10/18 mandic00@live.com
+### **HEAD -> main** 2022/10/24 mandic00@live.com

 - add draw label templates
 - reduce dev dependencies
package.json (14 changed lines)

@@ -17,7 +17,13 @@
     "import": "./dist/human.esm.js",
     "script": "./dist/human.js",
     "module": "./dist/human.esm.js",
-    "types": "./types/human.d.ts"
+    "types": "./types/human.d.ts",
+    "dist/human.js": "./dist/human.js",
+    "dist/human.esm.js": "./dist/human.esm.js",
+    "dist/human.esm-nobundle.js": "./dist/human.esm-nobundle.js",
+    "dist/human.node.js": "./dist/human.node.js",
+    "dist/human.node-wasm.js": "./dist/human.node-wasm.js",
+    "dist/human.node-gpu.js": "./dist/human.node-gpu.js"
   },
   "author": "Vladimir Mandic <mandic00@live.com>",
   "bugs": {
@@ -68,7 +74,7 @@
   "devDependencies": {
     "@html-eslint/eslint-plugin": "^0.15.0",
     "@html-eslint/parser": "^0.15.0",
-    "@microsoft/api-extractor": "^7.33.4",
+    "@microsoft/api-extractor": "^7.33.5",
     "@tensorflow/tfjs-backend-cpu": "^4.0.0",
     "@tensorflow/tfjs-backend-wasm": "^4.0.0",
     "@tensorflow/tfjs-backend-webgl": "^4.0.0",
@@ -77,7 +83,7 @@
     "@tensorflow/tfjs-core": "^4.0.0",
     "@tensorflow/tfjs-node": "^4.0.0",
     "@tensorflow/tfjs-node-gpu": "^4.0.0",
-    "@types/node": "^18.11.4",
+    "@types/node": "^18.11.7",
     "@types/offscreencanvas": "^2019.7.0",
     "@typescript-eslint/eslint-plugin": "^5.41.0",
     "@typescript-eslint/parser": "^5.41.0",
@@ -95,7 +101,7 @@
     "eslint-plugin-promise": "^6.1.1",
     "rimraf": "^3.0.2",
     "tslib": "^2.4.0",
-    "typedoc": "0.23.18",
+    "typedoc": "0.23.19",
     "typescript": "4.8.4"
   }
 }
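For reference, the new entries above map each published bundle name to its file under `dist/`, next to the existing `import`/`script`/`module`/`types` fields. A minimal consumption sketch is shown below; it assumes the package is installed as `@vladmandic/human`, that the resolver picks the ESM entry (`dist/human.esm.js`), and it only uses the library's documented `Human`, `load()`, and `detect()` API — it is an illustration, not part of this commit.

```ts
// Minimal consumption sketch (assumption: the package is installed as @vladmandic/human
// and resolved as ESM). The entry points above decide whether dist/human.esm.js (browser)
// or dist/human.node.js (NodeJS) gets loaded; the calls below are the library's
// standard Human / load() / detect() interface.
import { Human } from '@vladmandic/human';

const human = new Human({ debug: false });

export async function countFaces(input: HTMLImageElement | HTMLVideoElement): Promise<number> {
  await human.load();                        // pre-load configured models
  const result = await human.detect(input);  // run all enabled detectors
  return result.face.length;                 // number of detected faces
}
```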
Binary file not shown (image changed: 50 KiB → 59 KiB).
test/build.log

@@ -1,40 +1,40 @@
2022-10-24 15:33:37 [32mDATA: [39m Build {"name":"@vladmandic/human","version":"3.0.0"}
|
||||
2022-10-24 15:33:37 [36mINFO: [39m Application: {"name":"@vladmandic/human","version":"3.0.0"}
|
||||
2022-10-24 15:33:37 [36mINFO: [39m Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
|
||||
2022-10-24 15:33:37 [36mINFO: [39m Toolchain: {"build":"0.7.14","esbuild":"0.15.12","typescript":"4.8.4","typedoc":"0.23.18","eslint":"8.26.0"}
|
||||
2022-10-24 15:33:37 [36mINFO: [39m Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
|
||||
2022-10-24 15:33:37 [35mSTATE:[39m Clean: {"locations":["dist/*","types/*","typedoc/*"]}
|
||||
2022-10-24 15:33:37 [35mSTATE:[39m Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":361}
|
||||
2022-10-24 15:33:37 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":569,"outputBytes":924}
|
||||
2022-10-24 15:33:37 [35mSTATE:[39m Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":79,"inputBytes":672207,"outputBytes":316303}
|
||||
2022-10-24 15:33:37 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":577,"outputBytes":928}
|
||||
2022-10-24 15:33:37 [35mSTATE:[39m Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":79,"inputBytes":672211,"outputBytes":316307}
|
||||
2022-10-24 15:33:37 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":665,"outputBytes":1876}
|
||||
2022-10-24 15:33:37 [35mSTATE:[39m Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":79,"inputBytes":673159,"outputBytes":316418}
|
||||
2022-10-24 15:33:37 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1375,"outputBytes":670}
|
||||
2022-10-24 15:33:37 [35mSTATE:[39m Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":79,"inputBytes":671953,"outputBytes":314908}
|
||||
2022-10-24 15:33:37 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":1375,"outputBytes":1144900}
|
||||
2022-10-24 15:33:38 [35mSTATE:[39m Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":79,"inputBytes":1816183,"outputBytes":1456466}
|
||||
2022-10-24 15:33:38 [35mSTATE:[39m Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":79,"inputBytes":1816183,"outputBytes":1913830}
|
||||
2022-10-24 15:33:42 [35mSTATE:[39m Typings: {"input":"src/human.ts","output":"types/lib","files":15}
|
||||
2022-10-24 15:33:44 [35mSTATE:[39m TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":76,"generated":true}
|
||||
2022-10-24 15:33:44 [35mSTATE:[39m Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5936,"outputBytes":2867}
|
||||
2022-10-24 15:33:44 [35mSTATE:[39m Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17134,"outputBytes":9181}
|
||||
2022-10-24 15:33:52 [35mSTATE:[39m Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":116,"errors":0,"warnings":0}
|
||||
2022-10-24 15:33:52 [35mSTATE:[39m ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
|
||||
2022-10-24 15:33:52 [35mSTATE:[39m Copy: {"input":"src/tfjs","output":"dist/tfjs.esm.d.ts"}
|
||||
2022-10-24 15:33:52 [36mINFO: [39m Done...
|
||||
2022-10-24 15:33:53 [35mSTATE:[39m API-Extractor: {"succeeeded":true,"errors":0,"warnings":195}
|
||||
2022-10-24 15:33:53 [35mSTATE:[39m Filter: {"input":"types/human.d.ts"}
|
||||
2022-10-24 15:33:53 [35mSTATE:[39m Link: {"input":"types/human.d.ts"}
|
||||
2022-10-24 15:33:53 [36mINFO: [39m Analyze models: {"folders":8,"result":"models/models.json"}
|
||||
2022-10-24 15:33:53 [35mSTATE:[39m Models {"folder":"./models","models":12}
|
||||
2022-10-24 15:33:53 [35mSTATE:[39m Models {"folder":"../human-models/models","models":43}
|
||||
2022-10-24 15:33:53 [35mSTATE:[39m Models {"folder":"../blazepose/model/","models":4}
|
||||
2022-10-24 15:33:53 [35mSTATE:[39m Models {"folder":"../anti-spoofing/model","models":1}
|
||||
2022-10-24 15:33:53 [35mSTATE:[39m Models {"folder":"../efficientpose/models","models":3}
|
||||
2022-10-24 15:33:53 [35mSTATE:[39m Models {"folder":"../insightface/models","models":5}
|
||||
2022-10-24 15:33:53 [35mSTATE:[39m Models {"folder":"../movenet/models","models":3}
|
||||
2022-10-24 15:33:53 [35mSTATE:[39m Models {"folder":"../nanodet/models","models":4}
|
||||
2022-10-24 15:33:53 [35mSTATE:[39m Models: {"count":58,"totalSize":386543911}
|
||||
2022-10-24 15:33:53 [36mINFO: [39m Human Build complete... {"logFile":"test/build.log"}
|
||||
2022-10-28 09:26:00 [32mDATA: [39m Build {"name":"@vladmandic/human","version":"3.0.0"}
|
||||
2022-10-28 09:26:00 [36mINFO: [39m Application: {"name":"@vladmandic/human","version":"3.0.0"}
|
||||
2022-10-28 09:26:00 [36mINFO: [39m Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
|
||||
2022-10-28 09:26:00 [36mINFO: [39m Toolchain: {"build":"0.7.14","esbuild":"0.15.12","typescript":"4.8.4","typedoc":"0.23.19","eslint":"8.26.0"}
|
||||
2022-10-28 09:26:00 [36mINFO: [39m Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
|
||||
2022-10-28 09:26:00 [35mSTATE:[39m Clean: {"locations":["dist/*","types/*","typedoc/*"]}
|
||||
2022-10-28 09:26:00 [35mSTATE:[39m Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":361}
|
||||
2022-10-28 09:26:00 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":569,"outputBytes":924}
|
||||
2022-10-28 09:26:00 [35mSTATE:[39m Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":79,"inputBytes":672529,"outputBytes":316303}
|
||||
2022-10-28 09:26:00 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":577,"outputBytes":928}
|
||||
2022-10-28 09:26:00 [35mSTATE:[39m Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":79,"inputBytes":672533,"outputBytes":316307}
|
||||
2022-10-28 09:26:00 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":665,"outputBytes":1876}
|
||||
2022-10-28 09:26:00 [35mSTATE:[39m Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":79,"inputBytes":673481,"outputBytes":316418}
|
||||
2022-10-28 09:26:00 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1375,"outputBytes":670}
|
||||
2022-10-28 09:26:00 [35mSTATE:[39m Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":79,"inputBytes":672275,"outputBytes":314908}
|
||||
2022-10-28 09:26:00 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":1375,"outputBytes":1144900}
|
||||
2022-10-28 09:26:00 [35mSTATE:[39m Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":79,"inputBytes":1816505,"outputBytes":1456466}
|
||||
2022-10-28 09:26:01 [35mSTATE:[39m Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":79,"inputBytes":1816505,"outputBytes":1913830}
|
||||
2022-10-28 09:26:04 [35mSTATE:[39m Typings: {"input":"src/human.ts","output":"types/lib","files":15}
|
||||
2022-10-28 09:26:06 [35mSTATE:[39m TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":76,"generated":true}
|
||||
2022-10-28 09:26:06 [35mSTATE:[39m Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5936,"outputBytes":2867}
|
||||
2022-10-28 09:26:06 [35mSTATE:[39m Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17134,"outputBytes":9181}
|
||||
2022-10-28 09:26:14 [35mSTATE:[39m Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":116,"errors":0,"warnings":0}
|
||||
2022-10-28 09:26:15 [35mSTATE:[39m ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
|
||||
2022-10-28 09:26:15 [35mSTATE:[39m Copy: {"input":"src/tfjs","output":"dist/tfjs.esm.d.ts"}
|
||||
2022-10-28 09:26:15 [36mINFO: [39m Done...
|
||||
2022-10-28 09:26:15 [35mSTATE:[39m API-Extractor: {"succeeeded":true,"errors":0,"warnings":195}
|
||||
2022-10-28 09:26:15 [35mSTATE:[39m Filter: {"input":"types/human.d.ts"}
|
||||
2022-10-28 09:26:15 [35mSTATE:[39m Link: {"input":"types/human.d.ts"}
|
||||
2022-10-28 09:26:15 [36mINFO: [39m Analyze models: {"folders":8,"result":"models/models.json"}
|
||||
2022-10-28 09:26:15 [35mSTATE:[39m Models {"folder":"./models","models":12}
|
||||
2022-10-28 09:26:15 [35mSTATE:[39m Models {"folder":"../human-models/models","models":43}
|
||||
2022-10-28 09:26:15 [35mSTATE:[39m Models {"folder":"../blazepose/model/","models":4}
|
||||
2022-10-28 09:26:15 [35mSTATE:[39m Models {"folder":"../anti-spoofing/model","models":1}
|
||||
2022-10-28 09:26:15 [35mSTATE:[39m Models {"folder":"../efficientpose/models","models":3}
|
||||
2022-10-28 09:26:15 [35mSTATE:[39m Models {"folder":"../insightface/models","models":5}
|
||||
2022-10-28 09:26:15 [35mSTATE:[39m Models {"folder":"../movenet/models","models":3}
|
||||
2022-10-28 09:26:15 [35mSTATE:[39m Models {"folder":"../nanodet/models","models":4}
|
||||
2022-10-28 09:26:16 [35mSTATE:[39m Models: {"count":58,"totalSize":386543911}
|
||||
2022-10-28 09:26:16 [36mINFO: [39m Human Build complete... {"logFile":"test/build.log"}
test/test.log (666 changed lines)

@@ -1,659 +1,7 @@
2022-10-17 10:35:06 [36mINFO: [39m @vladmandic/human version 3.0.0
|
||||
2022-10-17 10:35:06 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v18.10.0
|
||||
2022-10-17 10:35:06 [36mINFO: [39m demos: [{"cmd":"../demo/nodejs/node.js","args":[]},{"cmd":"../demo/nodejs/node-simple.js","args":[]},{"cmd":"../demo/nodejs/node-event.js","args":["samples/in/ai-body.jpg"]},{"cmd":"../demo/nodejs/node-similarity.js","args":["samples/in/ai-face.jpg","samples/in/ai-upper.jpg"]},{"cmd":"../demo/nodejs/node-canvas.js","args":["samples/in/ai-body.jpg","samples/out/ai-body.jpg"]},{"cmd":"../demo/nodejs/process-folder.js","args":["samples"]},{"cmd":"../demo/multithread/node-multiprocess.js","args":[]},{"cmd":"../demo/facematch/node-match.js","args":[]}]
|
||||
2022-10-17 10:35:06 [36mINFO: [39m {"cmd":"../demo/nodejs/node.js","args":[]} start
|
||||
2022-10-17 10:35:08 [36mINFO: [39m {"cmd":"../demo/nodejs/node-simple.js","args":[]} start
|
||||
2022-10-17 10:35:08 [36mINFO: [39m {"cmd":"../demo/nodejs/node-event.js","args":["samples/in/ai-body.jpg"]} start
|
||||
2022-10-17 10:35:09 [36mINFO: [39m {"cmd":"../demo/nodejs/node-similarity.js","args":["samples/in/ai-face.jpg","samples/in/ai-upper.jpg"]} start
|
||||
2022-10-17 10:35:10 [36mINFO: [39m {"cmd":"../demo/nodejs/node-canvas.js","args":["samples/in/ai-body.jpg","samples/out/ai-body.jpg"]} start
|
||||
2022-10-17 10:35:10 [36mINFO: [39m {"cmd":"../demo/nodejs/process-folder.js","args":["samples"]} start
|
||||
2022-10-17 10:35:13 [36mINFO: [39m {"cmd":"../demo/multithread/node-multiprocess.js","args":[]} start
|
||||
2022-10-17 10:35:25 [36mINFO: [39m {"cmd":"../demo/facematch/node-match.js","args":[]} start
|
||||
2022-10-17 10:35:27 [36mINFO: [39m tests: ["test-node-load.js","test-node-gear.js","test-backend-node.js","test-backend-node-gpu.js","test-backend-node-wasm.js"]
|
||||
2022-10-17 10:35:27 [36mINFO: [39m
|
||||
2022-10-17 10:35:27 [36mINFO: [39m test-node-load.js start
|
||||
2022-10-17 10:35:27 [36mINFO: [39m test-node-load.js load start {"human":"3.0.0","tf":"4.0.0","progress":0}
|
||||
2022-10-17 10:35:27 [32mDATA: [39m test-node-load.js load interval {"elapsed":1,"progress":0}
|
||||
2022-10-17 10:35:27 [32mDATA: [39m test-node-load.js load interval {"elapsed":11,"progress":0}
|
||||
2022-10-17 10:35:27 [32mDATA: [39m test-node-load.js load interval {"elapsed":24,"progress":0.05339166087267679}
|
||||
2022-10-17 10:35:27 [32mDATA: [39m test-node-load.js load interval {"elapsed":33,"progress":0.2135162934143239}
|
||||
2022-10-17 10:35:27 [32mDATA: [39m test-node-load.js load interval {"elapsed":57,"progress":0.3299591712723044}
|
||||
2022-10-17 10:35:27 [32mDATA: [39m test-node-load.js load interval {"elapsed":73,"progress":0.5125946867158943}
|
||||
2022-10-17 10:35:27 [32mDATA: [39m test-node-load.js load interval {"elapsed":82,"progress":0.7259096583739463}
|
||||
2022-10-17 10:35:27 [35mSTATE:[39m test-node-load.js passed {"progress":1}
|
||||
2022-10-17 10:35:27 [36mINFO: [39m test-node-load.js load final {"progress":1}
|
||||
2022-10-17 10:35:28 [32mDATA: [39m test-node-load.js load interval {"elapsed":454,"progress":1}
|
||||
2022-10-17 10:35:28 [36mINFO: [39m
|
||||
2022-10-17 10:35:28 [36mINFO: [39m test-node-gear.js start
|
||||
2022-10-17 10:35:28 [32mDATA: [39m test-node-gear.js input: ["samples/in/ai-face.jpg"]
|
||||
2022-10-17 10:35:29 [35mSTATE:[39m test-node-gear.js passed: gear faceres samples/in/ai-face.jpg
|
||||
2022-10-17 10:35:29 [32mDATA: [39m test-node-gear.js results {"face":0,"model":"faceres","image":"samples/in/ai-face.jpg","age":23.5,"gender":"female","genderScore":0.92}
|
||||
2022-10-17 10:35:29 [35mSTATE:[39m test-node-gear.js passed: gear gear samples/in/ai-face.jpg
|
||||
2022-10-17 10:35:29 [32mDATA: [39m test-node-gear.js results {"face":0,"model":"gear","image":"samples/in/ai-face.jpg","age":23.3,"gender":"female","genderScore":0.51,"race":[{"score":0.93,"race":"white"}]}
|
||||
2022-10-17 10:35:29 [35mSTATE:[39m test-node-gear.js passed: gear ssrnet samples/in/ai-face.jpg
|
||||
2022-10-17 10:35:29 [32mDATA: [39m test-node-gear.js results {"face":0,"model":"ssrnet","image":"samples/in/ai-face.jpg","age":23.4,"gender":"female","genderScore":0.99}
|
||||
2022-10-17 10:35:29 [36mINFO: [39m
|
||||
2022-10-17 10:35:29 [36mINFO: [39m test-backend-node.js start
|
||||
2022-10-17 10:35:29 [36mINFO: [39m test-backend-node.js test: configuration validation
|
||||
2022-10-17 10:35:29 [35mSTATE:[39m test-backend-node.js passed: configuration default validation []
|
||||
2022-10-17 10:35:29 [35mSTATE:[39m test-backend-node.js passed: configuration invalid validation [{"reason":"unknown property","where":"config.invalid = true"}]
|
||||
2022-10-17 10:35:29 [36mINFO: [39m test-backend-node.js test: model load
|
||||
2022-10-17 10:35:29 [35mSTATE:[39m test-backend-node.js passed: models loaded 25 11 [{"name":"ssrnetage","loaded":false,"url":null},{"name":"gear","loaded":false,"url":null},{"name":"blazeposedetect","loaded":false,"url":null},{"name":"blazepose","loaded":false,"url":null},{"name":"centernet","loaded":true,"url":"file://models/mb3-centernet.json"},{"name":"efficientpose","loaded":false,"url":null},{"name":"mobilefacenet","loaded":false,"url":null},{"name":"insightface","loaded":false,"url":null},{"name":"emotion","loaded":true,"url":"file://models/emotion.json"},{"name":"facedetect","loaded":true,"url":"file://models/blazeface.json"},{"name":"faceiris","loaded":true,"url":"file://models/iris.json"},{"name":"facemesh","loaded":true,"url":"file://models/facemesh.json"},{"name":"faceres","loaded":true,"url":"file://models/faceres.json"},{"name":"ssrnetgender","loaded":false,"url":null},{"name":"handpose","loaded":false,"url":null},{"name":"handskeleton","loaded":true,"url":"file://models/handlandmark-full.json"},{"name":"handtrack","loaded":true,"url":"file://models/handtrack.json"},{"name":"liveness","loaded":true,"url":"file://models/liveness.json"},{"name":"meet","loaded":false,"url":null},{"name":"movenet","loaded":true,"url":"file://models/movenet-lightning.json"},{"name":"nanodet","loaded":false,"url":null},{"name":"posenet","loaded":false,"url":null},{"name":"selfie","loaded":false,"url":null},{"name":"rvm","loaded":false,"url":null},{"name":"antispoof","loaded":true,"url":"file://models/antispoof.json"}]
|
||||
2022-10-17 10:35:29 [36mINFO: [39m test-backend-node.js memory: {"memory":{"unreliable":true,"numTensors":1785,"numDataBuffers":1785,"numBytes":63247332}}
|
||||
2022-10-17 10:35:29 [36mINFO: [39m test-backend-node.js state: {"state":{"registeredVariables":{},"nextTapeNodeId":0,"numBytes":63247332,"numTensors":1785,"numStringTensors":0,"numDataBuffers":1785,"gradientDepth":0,"kernelDepth":0,"scopeStack":[],"numDataMovesStack":[],"nextScopeId":0,"tensorInfo":{},"profiling":false,"activeProfile":{"newBytes":0,"newTensors":0,"peakBytes":0,"kernels":[],"result":null,"kernelNames":[]}}}
|
||||
2022-10-17 10:35:29 [36mINFO: [39m test-backend-node.js test: warmup
|
||||
2022-10-17 10:35:29 [35mSTATE:[39m test-backend-node.js passed: create human
|
||||
2022-10-17 10:35:29 [36mINFO: [39m test-backend-node.js human version: 3.0.0
|
||||
2022-10-17 10:35:29 [36mINFO: [39m test-backend-node.js platform: linux x64 agent: NodeJS v18.10.0
|
||||
2022-10-17 10:35:29 [36mINFO: [39m test-backend-node.js tfjs version: 4.0.0
|
||||
2022-10-17 10:35:29 [36mINFO: [39m test-backend-node.js env: {"browser":false,"node":true,"platform":"linux x64","agent":"NodeJS v18.10.0","backends":["cpu","tensorflow"],"initial":false,"tfjs":{"version":"4.0.0"},"offscreen":false,"perfadd":false,"tensorflow":{"version":"2.9.1","gpu":false},"wasm":{"supported":true,"backend":false},"webgl":{"supported":false,"backend":false},"webgpu":{"supported":false,"backend":false},"cpu":{"flags":[]},"kernels":169}
|
||||
2022-10-17 10:35:29 [35mSTATE:[39m test-backend-node.js passed: set backend: tensorflow
|
||||
2022-10-17 10:35:29 [35mSTATE:[39m test-backend-node.js tensors 1785
|
||||
2022-10-17 10:35:29 [35mSTATE:[39m test-backend-node.js passed: load models
|
||||
2022-10-17 10:35:29 [35mSTATE:[39m test-backend-node.js result: defined models: 25 loaded models: 11
|
||||
2022-10-17 10:35:29 [35mSTATE:[39m test-backend-node.js passed: warmup: none default
|
||||
2022-10-17 10:35:29 [32mDATA: [39m test-backend-node.js result: face: 0 body: 0 hand: 0 gesture: 0 object: 0 person: 0 {} {} {}
|
||||
2022-10-17 10:35:29 [32mDATA: [39m test-backend-node.js result: performance: load: null total: null
|
||||
2022-10-17 10:35:29 [35mSTATE:[39m test-backend-node.js passed: warmup none result match
|
||||
2022-10-17 10:35:29 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:30 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:30 [35mSTATE:[39m test-backend-node.js event: warmup
|
||||
2022-10-17 10:35:30 [35mSTATE:[39m test-backend-node.js passed: warmup: face default
|
||||
2022-10-17 10:35:30 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4}
|
||||
2022-10-17 10:35:30 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 438
|
||||
2022-10-17 10:35:30 [35mSTATE:[39m test-backend-node.js passed: warmup face result match
|
||||
2022-10-17 10:35:30 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:30 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:30 [35mSTATE:[39m test-backend-node.js event: warmup
|
||||
2022-10-17 10:35:30 [35mSTATE:[39m test-backend-node.js passed: warmup: body default
|
||||
2022-10-17 10:35:30 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2022-10-17 10:35:30 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 361
|
||||
2022-10-17 10:35:30 [35mSTATE:[39m test-backend-node.js passed: warmup body result match
|
||||
2022-10-17 10:35:30 [35mSTATE:[39m test-backend-node.js details: {"face":{"boxScore":0.92,"faceScore":1,"age":23.7,"gender":"female","genderScore":0.97},"emotion":[{"score":0.63,"emotion":"angry"},{"score":0.22,"emotion":"fear"}],"body":{"score":0.92,"keypoints":17},"hand":{"boxScore":0.52,"fingerScore":0.73,"keypoints":21},"gestures":[{"face":0,"gesture":"facing right"},{"face":0,"gesture":"mouth 10% open"},{"hand":0,"gesture":"pinky forward"},{"hand":0,"gesture":"palm up"},{"hand":0,"gesture":"open palm"},{"iris":0,"gesture":"looking left"},{"iris":0,"gesture":"looking up"}]}
|
||||
2022-10-17 10:35:30 [36mINFO: [39m test-backend-node.js test: details verification
|
||||
2022-10-17 10:35:30 [35mSTATE:[39m test-backend-node.js start default
|
||||
2022-10-17 10:35:30 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864}
|
||||
2022-10-17 10:35:30 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: detect: samples/in/ai-body.jpg default
|
||||
2022-10-17 10:35:31 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2022-10-17 10:35:31 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 350
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details face length 1
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details face score 1 0.93 1
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details face age/gender 23.7 female 0.97 85.47
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details face arrays 4 478 1024
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details face emotion 2 {"score":0.59,"emotion":"angry"}
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details face anti-spoofing 0.79
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details face liveness 0.83
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details body length 1
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details body 0.92 17 6
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details hand length 1
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details hand 0.51 0.73 point
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details hand arrays 21 5 7
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details gesture length 7
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details gesture first {"face":0,"gesture":"facing right"}
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details object length 1
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: details object 0.72 person
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996928}
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:31 [35mSTATE:[39m test-backend-node.js passed: tensor shape: [1,1200,1200,4] dtype: float32
|
||||
2022-10-17 10:35:32 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1200,1200,4] {"checksum":1371996928}
|
||||
2022-10-17 10:35:32 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:32 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:32 [35mSTATE:[39m test-backend-node.js passed: tensor shape: [1200,1200,4] dtype: float32
|
||||
2022-10-17 10:35:32 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864}
|
||||
2022-10-17 10:35:32 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:32 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:32 [35mSTATE:[39m test-backend-node.js passed: tensor shape: [1,1200,1200,3] dtype: float32
|
||||
2022-10-17 10:35:33 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1200,1200,3] {"checksum":1004796864}
|
||||
2022-10-17 10:35:33 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:33 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:33 [35mSTATE:[39m test-backend-node.js passed: tensor shape: [1200,1200,3] dtype: float32
|
||||
2022-10-17 10:35:33 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996871}
|
||||
2022-10-17 10:35:33 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:33 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:33 [35mSTATE:[39m test-backend-node.js passed: tensor shape: [1,1200,1200,4] dtype: int32
|
||||
2022-10-17 10:35:33 [36mINFO: [39m test-backend-node.js test default
|
||||
2022-10-17 10:35:33 [35mSTATE:[39m test-backend-node.js start async
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864}
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js passed: detect: samples/in/ai-body.jpg async
|
||||
2022-10-17 10:35:34 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2022-10-17 10:35:34 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 325
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js passed: default result face match 1 female 0.97
|
||||
2022-10-17 10:35:34 [36mINFO: [39m test-backend-node.js test sync
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js start sync
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864}
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js passed: detect: samples/in/ai-body.jpg sync
|
||||
2022-10-17 10:35:34 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2022-10-17 10:35:34 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 326
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js passed: default sync 1 female 0.97
|
||||
2022-10-17 10:35:34 [36mINFO: [39m test-backend-node.js test: image process
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120}
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js passed: image input null [1,256,256,3]
|
||||
2022-10-17 10:35:34 [36mINFO: [39m test-backend-node.js test: image null
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js passed: invalid input could not convert input to tensor
|
||||
2022-10-17 10:35:34 [36mINFO: [39m test-backend-node.js test face similarity
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js start face similarity
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120}
|
||||
2022-10-17 10:35:34 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:35 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:35 [35mSTATE:[39m test-backend-node.js passed: detect: samples/in/ai-face.jpg face similarity
|
||||
2022-10-17 10:35:35 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 6 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":3}
|
||||
2022-10-17 10:35:35 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 315
|
||||
2022-10-17 10:35:35 [35mSTATE:[39m test-backend-node.js start face similarity
|
||||
2022-10-17 10:35:35 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864}
|
||||
2022-10-17 10:35:35 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:35 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:35 [35mSTATE:[39m test-backend-node.js passed: detect: samples/in/ai-body.jpg face similarity
|
||||
2022-10-17 10:35:35 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2022-10-17 10:35:35 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 307
|
||||
2022-10-17 10:35:35 [35mSTATE:[39m test-backend-node.js start face similarity
|
||||
2022-10-17 10:35:35 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-upper.jpg [1,720,688,3] {"checksum":151289024}
|
||||
2022-10-17 10:35:35 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:36 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:36 [35mSTATE:[39m test-backend-node.js passed: detect: samples/in/ai-upper.jpg face similarity
|
||||
2022-10-17 10:35:36 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.75,"keypoints":7}
|
||||
2022-10-17 10:35:36 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 272
|
||||
2022-10-17 10:35:36 [35mSTATE:[39m test-backend-node.js passed: face descriptor
|
||||
2022-10-17 10:35:36 [35mSTATE:[39m test-backend-node.js passed: face similarity {"similarity":[1,0.44727441595492046,0.556793560189727],"descriptors":[1024,1024,1024]}
|
||||
2022-10-17 10:35:36 [36mINFO: [39m test-backend-node.js test object
|
||||
2022-10-17 10:35:36 [35mSTATE:[39m test-backend-node.js start object
|
||||
2022-10-17 10:35:36 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864}
|
||||
2022-10-17 10:35:36 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:36 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:36 [35mSTATE:[39m test-backend-node.js passed: detect: samples/in/ai-body.jpg object
|
||||
2022-10-17 10:35:36 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2022-10-17 10:35:36 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 300
|
||||
2022-10-17 10:35:36 [35mSTATE:[39m test-backend-node.js passed: centernet
|
||||
2022-10-17 10:35:36 [35mSTATE:[39m test-backend-node.js start object
|
||||
2022-10-17 10:35:37 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864}
|
||||
2022-10-17 10:35:37 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:37 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:37 [35mSTATE:[39m test-backend-node.js passed: detect: samples/in/ai-body.jpg object
|
||||
2022-10-17 10:35:37 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 3 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.86,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2022-10-17 10:35:37 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 315
|
||||
2022-10-17 10:35:37 [35mSTATE:[39m test-backend-node.js passed: nanodet
|
||||
2022-10-17 10:35:37 [36mINFO: [39m test-backend-node.js test sensitive
|
||||
2022-10-17 10:35:37 [35mSTATE:[39m test-backend-node.js start sensitive
|
||||
2022-10-17 10:35:37 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864}
|
||||
2022-10-17 10:35:37 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:38 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:38 [35mSTATE:[39m test-backend-node.js passed: detect: samples/in/ai-body.jpg sensitive
|
||||
2022-10-17 10:35:38 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.92,"keypoints":17}
|
||||
2022-10-17 10:35:38 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 279
|
||||
2022-10-17 10:35:38 [35mSTATE:[39m test-backend-node.js passed: sensitive result match
|
||||
2022-10-17 10:35:38 [35mSTATE:[39m test-backend-node.js passed: sensitive face result match
|
||||
2022-10-17 10:35:38 [35mSTATE:[39m test-backend-node.js passed: sensitive face emotion result [{"score":0.59,"emotion":"angry"},{"score":0.29,"emotion":"fear"}]
|
||||
2022-10-17 10:35:38 [35mSTATE:[39m test-backend-node.js passed: sensitive body result match
|
||||
2022-10-17 10:35:38 [35mSTATE:[39m test-backend-node.js passed: sensitive hand result match
|
||||
2022-10-17 10:35:38 [36mINFO: [39m test-backend-node.js test body
|
||||
2022-10-17 10:35:38 [35mSTATE:[39m test-backend-node.js start blazepose
|
||||
2022-10-17 10:35:40 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864}
|
||||
2022-10-17 10:35:40 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:40 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:40 [35mSTATE:[39m test-backend-node.js passed: detect: samples/in/ai-body.jpg blazepose
|
||||
2022-10-17 10:35:40 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.99,"keypoints":39}
|
||||
2022-10-17 10:35:40 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 344
|
||||
2022-10-17 10:35:40 [35mSTATE:[39m test-backend-node.js passed: blazepose
|
||||
2022-10-17 10:35:40 [35mSTATE:[39m test-backend-node.js start efficientpose
|
||||
2022-10-17 10:35:40 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864}
|
||||
2022-10-17 10:35:40 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:41 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:41 [35mSTATE:[39m test-backend-node.js passed: detect: samples/in/ai-body.jpg efficientpose
|
||||
2022-10-17 10:35:41 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.75,"keypoints":13}
|
||||
2022-10-17 10:35:41 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 321
|
||||
2022-10-17 10:35:41 [35mSTATE:[39m test-backend-node.js passed: efficientpose
|
||||
2022-10-17 10:35:41 [35mSTATE:[39m test-backend-node.js start posenet
|
||||
2022-10-17 10:35:41 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864}
|
||||
2022-10-17 10:35:41 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:42 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:42 [35mSTATE:[39m test-backend-node.js passed: detect: samples/in/ai-body.jpg posenet
|
||||
2022-10-17 10:35:42 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.96,"keypoints":16}
|
||||
2022-10-17 10:35:42 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 269
|
||||
2022-10-17 10:35:42 [35mSTATE:[39m test-backend-node.js passed: posenet
|
||||
2022-10-17 10:35:42 [35mSTATE:[39m test-backend-node.js start movenet
|
||||
2022-10-17 10:35:42 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864}
|
||||
2022-10-17 10:35:42 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:42 [35mSTATE:[39m test-backend-node.js event: detect
|
||||
2022-10-17 10:35:42 [35mSTATE:[39m test-backend-node.js passed: detect: samples/in/ai-body.jpg movenet
|
||||
2022-10-17 10:35:42 [32mDATA: [39m test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.92,"keypoints":17}
|
||||
2022-10-17 10:35:42 [32mDATA: [39m test-backend-node.js result: performance: load: null total: 279
|
||||
2022-10-17 10:35:42 [35mSTATE:[39m test-backend-node.js passed: movenet
|
||||
2022-10-17 10:35:42 [36mINFO: [39m test-backend-node.js test face matching
|
||||
2022-10-17 10:35:42 [35mSTATE:[39m test-backend-node.js passed: face database 40
|
||||
2022-10-17 10:35:42 [35mSTATE:[39m test-backend-node.js passed: face match {"first":{"index":4,"similarity":0.7827852251220577}} {"second":{"index":4,"similarity":0.5002052057057577}} {"third":{"index":4,"similarity":0.5401588464054732}}
|
||||
2022-10-17 10:35:42 [36mINFO: [39m test-backend-node.js test face similarity alternative
|
||||
2022-10-17 10:35:42 [35mSTATE:[39m test-backend-node.js start face embeddings
|
||||
2022-10-17 10:35:42 [35mSTATE:[39m test-backend-node.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120}
|
||||
2022-10-17 10:35:42 [35mSTATE:[39m test-backend-node.js event: image
|
||||
2022-10-17 10:35:42 [31mERROR:[39m test-backend-node.js failed: testDetect face embeddings
|
||||
2022-10-17 10:35:42 [31mERROR:[39m test-backend-node.js uncaughtException {"name":"TypeError","message":"Cannot read properties of undefined (reading 'img_inputs')","stack":["TypeError: Cannot read properties of undefined (reading 'img_inputs')"," at /home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30706:69"," at Array.reduce (<anonymous>)"," at GraphModel.normalizeInputs (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30705:32)"," at GraphModel.execute (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30777:23)"," at /home/vlado/dev/human/dist/human.node.js:99:175358"," at new Promise (<anonymous>)"," at q5 (/home/vlado/dev/human/dist/human.node.js:99:175127)"," at R1 (/home/vlado/dev/human/dist/human.node.js:111:11475)"," at process.processTicksAndRejections (node:internal/process/task_queues:95:5)"," at async /home/vlado/dev/human/dist/human.node.js:830:8854"]}
|
||||
2022-10-17 10:35:43 [36mINFO: [39m
|
||||
2022-10-17 10:35:43 [36mINFO: [39m test-backend-node-gpu.js start
|
||||
2022-10-17 10:35:44 [36mINFO: [39m test-backend-node-gpu.js test: configuration validation
|
||||
2022-10-17 10:35:44 [35mSTATE:[39m test-backend-node-gpu.js passed: configuration default validation []
|
||||
2022-10-17 10:35:44 [35mSTATE:[39m test-backend-node-gpu.js passed: configuration invalid validation [{"reason":"unknown property","where":"config.invalid = true"}]
|
||||
2022-10-17 10:35:44 [36mINFO: [39m test-backend-node-gpu.js test: model load
|
||||
2022-10-17 10:35:44 [35mSTATE:[39m test-backend-node-gpu.js passed: models loaded 25 11 [{"name":"ssrnetage","loaded":false,"url":null},{"name":"gear","loaded":false,"url":null},{"name":"blazeposedetect","loaded":false,"url":null},{"name":"blazepose","loaded":false,"url":null},{"name":"centernet","loaded":true,"url":"file://models/mb3-centernet.json"},{"name":"efficientpose","loaded":false,"url":null},{"name":"mobilefacenet","loaded":false,"url":null},{"name":"insightface","loaded":false,"url":null},{"name":"emotion","loaded":true,"url":"file://models/emotion.json"},{"name":"facedetect","loaded":true,"url":"file://models/blazeface.json"},{"name":"faceiris","loaded":true,"url":"file://models/iris.json"},{"name":"facemesh","loaded":true,"url":"file://models/facemesh.json"},{"name":"faceres","loaded":true,"url":"file://models/faceres.json"},{"name":"ssrnetgender","loaded":false,"url":null},{"name":"handpose","loaded":false,"url":null},{"name":"handskeleton","loaded":true,"url":"file://models/handlandmark-full.json"},{"name":"handtrack","loaded":true,"url":"file://models/handtrack.json"},{"name":"liveness","loaded":true,"url":"file://models/liveness.json"},{"name":"meet","loaded":false,"url":null},{"name":"movenet","loaded":true,"url":"file://models/movenet-lightning.json"},{"name":"nanodet","loaded":false,"url":null},{"name":"posenet","loaded":false,"url":null},{"name":"selfie","loaded":false,"url":null},{"name":"rvm","loaded":false,"url":null},{"name":"antispoof","loaded":true,"url":"file://models/antispoof.json"}]
|
||||
2022-10-17 10:35:44 [36mINFO: [39m test-backend-node-gpu.js memory: {"memory":{"unreliable":true,"numTensors":1785,"numDataBuffers":1785,"numBytes":63247332}}
|
||||
2022-10-17 10:35:44 [36mINFO: [39m test-backend-node-gpu.js state: {"state":{"registeredVariables":{},"nextTapeNodeId":0,"numBytes":63247332,"numTensors":1785,"numStringTensors":0,"numDataBuffers":1785,"gradientDepth":0,"kernelDepth":0,"scopeStack":[],"numDataMovesStack":[],"nextScopeId":0,"tensorInfo":{},"profiling":false,"activeProfile":{"newBytes":0,"newTensors":0,"peakBytes":0,"kernels":[],"result":null,"kernelNames":[]}}}
|
||||
2022-10-17 10:35:44 [36mINFO: [39m test-backend-node-gpu.js test: warmup
|
||||
2022-10-17 10:35:44 [35mSTATE:[39m test-backend-node-gpu.js passed: create human
|
||||
2022-10-17 10:35:44 [36mINFO: [39m test-backend-node-gpu.js human version: 3.0.0
|
||||
2022-10-17 10:35:44 [36mINFO: [39m test-backend-node-gpu.js platform: linux x64 agent: NodeJS v18.10.0
|
||||
2022-10-17 10:35:44 [36mINFO: [39m test-backend-node-gpu.js tfjs version: 4.0.0
|
||||
2022-10-17 10:35:44 [36mINFO: [39m test-backend-node-gpu.js env: {"browser":false,"node":true,"platform":"linux x64","agent":"NodeJS v18.10.0","backends":["cpu","tensorflow"],"initial":false,"tfjs":{"version":"4.0.0"},"offscreen":false,"perfadd":false,"tensorflow":{"version":"2.9.1","gpu":true},"wasm":{"supported":true,"backend":false},"webgl":{"supported":false,"backend":false},"webgpu":{"supported":false,"backend":false},"cpu":{"flags":[]},"kernels":169}
|
||||
2022-10-17 10:35:44 [35mSTATE:[39m test-backend-node-gpu.js passed: set backend: tensorflow
|
||||
2022-10-17 10:35:44 [35mSTATE:[39m test-backend-node-gpu.js tensors 1785
|
||||
2022-10-17 10:35:44 [35mSTATE:[39m test-backend-node-gpu.js passed: load models
|
||||
2022-10-17 10:35:44 [35mSTATE:[39m test-backend-node-gpu.js result: defined models: 25 loaded models: 11
|
||||
2022-10-17 10:35:44 [35mSTATE:[39m test-backend-node-gpu.js passed: warmup: none default
|
||||
2022-10-17 10:35:44 [32mDATA: [39m test-backend-node-gpu.js result: face: 0 body: 0 hand: 0 gesture: 0 object: 0 person: 0 {} {} {}
|
||||
2022-10-17 10:35:44 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: null
|
||||
2022-10-17 10:35:44 [35mSTATE:[39m test-backend-node-gpu.js passed: warmup none result match
|
||||
2022-10-17 10:35:44 [35mSTATE:[39m test-backend-node-gpu.js event: image
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js event: detect
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js event: warmup
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: warmup: face default
|
||||
2022-10-17 10:35:47 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4}
|
||||
2022-10-17 10:35:47 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 2722
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: warmup face result match
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js event: image
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js event: detect
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js event: warmup
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: warmup: body default
|
||||
2022-10-17 10:35:47 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2022-10-17 10:35:47 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 141
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: warmup body result match
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js details: {"face":{"boxScore":0.92,"faceScore":1,"age":23.7,"gender":"female","genderScore":0.97},"emotion":[{"score":0.63,"emotion":"angry"},{"score":0.22,"emotion":"fear"}],"body":{"score":0.92,"keypoints":17},"hand":{"boxScore":0.52,"fingerScore":0.73,"keypoints":21},"gestures":[{"face":0,"gesture":"facing right"},{"face":0,"gesture":"mouth 10% open"},{"hand":0,"gesture":"pinky forward"},{"hand":0,"gesture":"palm up"},{"hand":0,"gesture":"open palm"},{"iris":0,"gesture":"looking left"},{"iris":0,"gesture":"looking up"}]}
|
||||
2022-10-17 10:35:47 [36mINFO: [39m test-backend-node-gpu.js test: details verification
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js start default
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928}
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js event: image
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js event: detect
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg default
|
||||
2022-10-17 10:35:47 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2022-10-17 10:35:47 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 134
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details face length 1
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details face score 1 0.93 1
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details face age/gender 23.7 female 0.97 85.47
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details face arrays 4 478 1024
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details face emotion 2 {"score":0.59,"emotion":"angry"}
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details face anti-spoofing 0.79
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details face liveness 0.83
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details body length 1
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details body 0.92 17 6
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details hand length 1
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details hand 0.51 0.73 point
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details hand arrays 21 5 7
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details gesture length 7
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details gesture first {"face":0,"gesture":"facing right"}
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details object length 1
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: details object 0.72 person
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996928}
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js event: image
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js event: detect
|
||||
2022-10-17 10:35:47 [35mSTATE:[39m test-backend-node-gpu.js passed: tensor shape: [1,1200,1200,4] dtype: float32
|
||||
2022-10-17 10:35:48 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1200,1200,4] {"checksum":1371996928}
|
||||
2022-10-17 10:35:48 [35mSTATE:[39m test-backend-node-gpu.js event: image
|
||||
2022-10-17 10:35:48 [35mSTATE:[39m test-backend-node-gpu.js event: detect
|
||||
2022-10-17 10:35:48 [35mSTATE:[39m test-backend-node-gpu.js passed: tensor shape: [1200,1200,4] dtype: float32
|
||||
2022-10-17 10:35:48 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928}
|
||||
2022-10-17 10:35:48 [35mSTATE:[39m test-backend-node-gpu.js event: image
|
||||
2022-10-17 10:35:48 [35mSTATE:[39m test-backend-node-gpu.js event: detect
|
||||
2022-10-17 10:35:48 [35mSTATE:[39m test-backend-node-gpu.js passed: tensor shape: [1,1200,1200,3] dtype: float32
|
||||
2022-10-17 10:35:48 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1200,1200,3] {"checksum":1004796928}
|
||||
2022-10-17 10:35:48 [35mSTATE:[39m test-backend-node-gpu.js event: image
|
||||
2022-10-17 10:35:48 [35mSTATE:[39m test-backend-node-gpu.js event: detect
|
||||
2022-10-17 10:35:48 [35mSTATE:[39m test-backend-node-gpu.js passed: tensor shape: [1200,1200,3] dtype: float32
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996871}
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js event: image
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js event: detect
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js passed: tensor shape: [1,1200,1200,4] dtype: int32
|
||||
2022-10-17 10:35:49 [36mINFO: [39m test-backend-node-gpu.js test default
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js start async
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928}
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js event: image
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js event: detect
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg async
|
||||
2022-10-17 10:35:49 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2022-10-17 10:35:49 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 119
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js passed: default result face match 1 female 0.97
|
||||
2022-10-17 10:35:49 [36mINFO: [39m test-backend-node-gpu.js test sync
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js start sync
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928}
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js event: image
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js event: detect
|
||||
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg sync
|
||||
2022-10-17 10:35:49 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2022-10-17 10:35:49 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 124
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js passed: default sync 1 female 0.97
2022-10-17 10:35:49 [36mINFO: [39m test-backend-node-gpu.js test: image process
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120}
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js passed: image input null [1,256,256,3]
2022-10-17 10:35:49 [36mINFO: [39m test-backend-node-gpu.js test: image null
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js passed: invalid input could not convert input to tensor
2022-10-17 10:35:49 [36mINFO: [39m test-backend-node-gpu.js test face similarity
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js start face similarity
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120}
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js event: image
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js event: detect
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js passed: detect: samples/in/ai-face.jpg face similarity
2022-10-17 10:35:49 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 6 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":3}
2022-10-17 10:35:49 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 116
2022-10-17 10:35:49 [35mSTATE:[39m test-backend-node-gpu.js start face similarity
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928}
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js event: image
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js event: detect
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg face similarity
2022-10-17 10:35:50 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2022-10-17 10:35:50 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 114
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js start face similarity
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-upper.jpg [1,720,688,3] {"checksum":151289056}
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js event: image
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js event: detect
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js passed: detect: samples/in/ai-upper.jpg face similarity
2022-10-17 10:35:50 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.75,"keypoints":7}
2022-10-17 10:35:50 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 102
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js passed: face descriptor
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js passed: face similarity {"similarity":[1,0.4475002983522097,0.5570879556505012],"descriptors":[1024,1024,1024]}
2022-10-17 10:35:50 [36mINFO: [39m test-backend-node-gpu.js test object
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js start object
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928}
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js event: image
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js event: detect
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg object
2022-10-17 10:35:50 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2022-10-17 10:35:50 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 132
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js passed: centernet
2022-10-17 10:35:50 [35mSTATE:[39m test-backend-node-gpu.js start object
2022-10-17 10:35:51 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928}
2022-10-17 10:35:51 [35mSTATE:[39m test-backend-node-gpu.js event: image
2022-10-17 10:35:51 [35mSTATE:[39m test-backend-node-gpu.js event: detect
2022-10-17 10:35:51 [35mSTATE:[39m test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg object
2022-10-17 10:35:51 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 3 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.86,"class":"person"} {"score":0.92,"keypoints":17}
2022-10-17 10:35:51 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 547
2022-10-17 10:35:51 [35mSTATE:[39m test-backend-node-gpu.js passed: nanodet
2022-10-17 10:35:51 [36mINFO: [39m test-backend-node-gpu.js test sensitive
2022-10-17 10:35:51 [35mSTATE:[39m test-backend-node-gpu.js start sensitive
2022-10-17 10:35:52 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928}
2022-10-17 10:35:52 [35mSTATE:[39m test-backend-node-gpu.js event: image
2022-10-17 10:35:52 [35mSTATE:[39m test-backend-node-gpu.js event: detect
2022-10-17 10:35:52 [35mSTATE:[39m test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg sensitive
2022-10-17 10:35:52 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.92,"keypoints":17}
2022-10-17 10:35:52 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 105
2022-10-17 10:35:52 [35mSTATE:[39m test-backend-node-gpu.js passed: sensitive result match
2022-10-17 10:35:52 [35mSTATE:[39m test-backend-node-gpu.js passed: sensitive face result match
2022-10-17 10:35:52 [35mSTATE:[39m test-backend-node-gpu.js passed: sensitive face emotion result [{"score":0.59,"emotion":"angry"},{"score":0.29,"emotion":"fear"}]
2022-10-17 10:35:52 [35mSTATE:[39m test-backend-node-gpu.js passed: sensitive body result match
2022-10-17 10:35:52 [35mSTATE:[39m test-backend-node-gpu.js passed: sensitive hand result match
2022-10-17 10:35:52 [36mINFO: [39m test-backend-node-gpu.js test body
2022-10-17 10:35:52 [35mSTATE:[39m test-backend-node-gpu.js start blazepose
2022-10-17 10:35:54 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928}
2022-10-17 10:35:54 [35mSTATE:[39m test-backend-node-gpu.js event: image
2022-10-17 10:35:54 [35mSTATE:[39m test-backend-node-gpu.js event: detect
2022-10-17 10:35:54 [35mSTATE:[39m test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg blazepose
2022-10-17 10:35:54 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.99,"keypoints":39}
2022-10-17 10:35:54 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 267
2022-10-17 10:35:54 [35mSTATE:[39m test-backend-node-gpu.js passed: blazepose
2022-10-17 10:35:54 [35mSTATE:[39m test-backend-node-gpu.js start efficientpose
2022-10-17 10:35:54 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928}
2022-10-17 10:35:54 [35mSTATE:[39m test-backend-node-gpu.js event: image
2022-10-17 10:35:55 [35mSTATE:[39m test-backend-node-gpu.js event: detect
2022-10-17 10:35:55 [35mSTATE:[39m test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg efficientpose
2022-10-17 10:35:55 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.75,"keypoints":13}
2022-10-17 10:35:55 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 1008
2022-10-17 10:35:55 [35mSTATE:[39m test-backend-node-gpu.js passed: efficientpose
2022-10-17 10:35:55 [35mSTATE:[39m test-backend-node-gpu.js start posenet
2022-10-17 10:35:56 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928}
2022-10-17 10:35:56 [35mSTATE:[39m test-backend-node-gpu.js event: image
2022-10-17 10:35:56 [35mSTATE:[39m test-backend-node-gpu.js event: detect
2022-10-17 10:35:56 [35mSTATE:[39m test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg posenet
2022-10-17 10:35:56 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.96,"keypoints":16}
2022-10-17 10:35:56 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 182
2022-10-17 10:35:56 [35mSTATE:[39m test-backend-node-gpu.js passed: posenet
2022-10-17 10:35:56 [35mSTATE:[39m test-backend-node-gpu.js start movenet
2022-10-17 10:35:56 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928}
2022-10-17 10:35:56 [35mSTATE:[39m test-backend-node-gpu.js event: image
2022-10-17 10:35:57 [35mSTATE:[39m test-backend-node-gpu.js event: detect
2022-10-17 10:35:57 [35mSTATE:[39m test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg movenet
2022-10-17 10:35:57 [32mDATA: [39m test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.92,"keypoints":17}
2022-10-17 10:35:57 [32mDATA: [39m test-backend-node-gpu.js result: performance: load: null total: 119
2022-10-17 10:35:57 [35mSTATE:[39m test-backend-node-gpu.js passed: movenet
2022-10-17 10:35:57 [36mINFO: [39m test-backend-node-gpu.js test face matching
2022-10-17 10:35:57 [35mSTATE:[39m test-backend-node-gpu.js passed: face database 40
2022-10-17 10:35:57 [35mSTATE:[39m test-backend-node-gpu.js passed: face match {"first":{"index":4,"similarity":0.7829338043932047}} {"second":{"index":4,"similarity":0.5002928781584631}} {"third":{"index":4,"similarity":0.5402934771672516}}
2022-10-17 10:35:57 [36mINFO: [39m test-backend-node-gpu.js test face similarity alternative
2022-10-17 10:35:57 [35mSTATE:[39m test-backend-node-gpu.js start face embeddings
2022-10-17 10:35:57 [35mSTATE:[39m test-backend-node-gpu.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120}
2022-10-17 10:35:57 [35mSTATE:[39m test-backend-node-gpu.js event: image
2022-10-17 10:35:57 [31mERROR:[39m test-backend-node-gpu.js failed: testDetect face embeddings
2022-10-17 10:35:57 [31mERROR:[39m test-backend-node-gpu.js uncaughtException {"name":"TypeError","message":"Cannot read properties of undefined (reading 'img_inputs')","stack":["TypeError: Cannot read properties of undefined (reading 'img_inputs')"," at /home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30706:69"," at Array.reduce (<anonymous>)"," at GraphModel.normalizeInputs (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30705:32)"," at GraphModel.execute (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30777:23)"," at /home/vlado/dev/human/dist/human.node-gpu.js:99:175358"," at new Promise (<anonymous>)"," at q5 (/home/vlado/dev/human/dist/human.node-gpu.js:99:175127)"," at R1 (/home/vlado/dev/human/dist/human.node-gpu.js:111:11475)"," at process.processTicksAndRejections (node:internal/process/task_queues:95:5)"," at async /home/vlado/dev/human/dist/human.node-gpu.js:830:8854"]}
2022-10-17 10:35:58 [36mINFO: [39m
2022-10-17 10:35:58 [36mINFO: [39m test-backend-node-wasm.js start
2022-10-17 10:35:58 [32mDATA: [39m test-backend-node-wasm.js stdout: 2022-10-17 10:35:58 [36mINFO: [39m { supported: [33mtrue[39m, backend: [33mtrue[39m, simd: [33mtrue[39m, multithread: [33mfalse[39m } https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@4.0.0/dist/
2022-10-17 10:35:58 [35mSTATE:[39m test-backend-node-wasm.js passed: model server: https://vladmandic.github.io/human-models/models/
2022-10-17 10:35:58 [36mINFO: [39m test-backend-node-wasm.js test: configuration validation
2022-10-17 10:35:58 [35mSTATE:[39m test-backend-node-wasm.js passed: configuration default validation []
2022-10-17 10:35:58 [35mSTATE:[39m test-backend-node-wasm.js passed: configuration invalid validation [{"reason":"unknown property","where":"config.invalid = true"}]
2022-10-17 10:35:58 [36mINFO: [39m test-backend-node-wasm.js test: model load
2022-10-17 10:36:01 [35mSTATE:[39m test-backend-node-wasm.js passed: models loaded 25 11 [{"name":"ssrnetage","loaded":false,"url":null},{"name":"gear","loaded":false,"url":null},{"name":"blazeposedetect","loaded":false,"url":null},{"name":"blazepose","loaded":false,"url":null},{"name":"centernet","loaded":true,"url":"https://vladmandic.github.io/human-models/models/mb3-centernet.json"},{"name":"efficientpose","loaded":false,"url":null},{"name":"mobilefacenet","loaded":false,"url":null},{"name":"insightface","loaded":false,"url":null},{"name":"emotion","loaded":true,"url":"https://vladmandic.github.io/human-models/models/emotion.json"},{"name":"facedetect","loaded":true,"url":"https://vladmandic.github.io/human-models/models/blazeface.json"},{"name":"faceiris","loaded":true,"url":"https://vladmandic.github.io/human-models/models/iris.json"},{"name":"facemesh","loaded":true,"url":"https://vladmandic.github.io/human-models/models/facemesh.json"},{"name":"faceres","loaded":true,"url":"https://vladmandic.github.io/human-models/models/faceres.json"},{"name":"ssrnetgender","loaded":false,"url":null},{"name":"handpose","loaded":false,"url":null},{"name":"handskeleton","loaded":true,"url":"https://vladmandic.github.io/human-models/models/handlandmark-full.json"},{"name":"handtrack","loaded":true,"url":"https://vladmandic.github.io/human-models/models/handtrack.json"},{"name":"liveness","loaded":true,"url":"https://vladmandic.github.io/human-models/models/liveness.json"},{"name":"meet","loaded":false,"url":null},{"name":"movenet","loaded":true,"url":"https://vladmandic.github.io/human-models/models/movenet-lightning.json"},{"name":"nanodet","loaded":false,"url":null},{"name":"posenet","loaded":false,"url":null},{"name":"selfie","loaded":false,"url":null},{"name":"rvm","loaded":false,"url":null},{"name":"antispoof","loaded":true,"url":"https://vladmandic.github.io/human-models/models/antispoof.json"}]
2022-10-17 10:36:01 [36mINFO: [39m test-backend-node-wasm.js memory: {"memory":{"unreliable":false,"numTensors":1785,"numDataBuffers":1785,"numBytes":63247332}}
2022-10-17 10:36:01 [36mINFO: [39m test-backend-node-wasm.js state: {"state":{"registeredVariables":{},"nextTapeNodeId":0,"numBytes":63247332,"numTensors":1785,"numStringTensors":0,"numDataBuffers":1785,"gradientDepth":0,"kernelDepth":0,"scopeStack":[],"numDataMovesStack":[],"nextScopeId":0,"tensorInfo":{},"profiling":false,"activeProfile":{"newBytes":0,"newTensors":0,"peakBytes":0,"kernels":[],"result":null,"kernelNames":[]}}}
2022-10-17 10:36:01 [36mINFO: [39m test-backend-node-wasm.js test: warmup
2022-10-17 10:36:01 [35mSTATE:[39m test-backend-node-wasm.js passed: create human
2022-10-17 10:36:01 [36mINFO: [39m test-backend-node-wasm.js human version: 3.0.0
2022-10-17 10:36:01 [36mINFO: [39m test-backend-node-wasm.js platform: linux x64 agent: NodeJS v18.10.0
2022-10-17 10:36:01 [36mINFO: [39m test-backend-node-wasm.js tfjs version: 4.0.0
2022-10-17 10:36:01 [36mINFO: [39m test-backend-node-wasm.js env: {"browser":false,"node":true,"platform":"linux x64","agent":"NodeJS v18.10.0","backends":["wasm"],"initial":false,"tfjs":{"version":"4.0.0"},"offscreen":false,"perfadd":false,"tensorflow":{},"wasm":{"supported":true,"backend":true,"simd":true,"multithread":false},"webgl":{"supported":false,"backend":false},"webgpu":{"supported":false,"backend":false},"cpu":{"flags":[]},"kernels":126}
2022-10-17 10:36:01 [35mSTATE:[39m test-backend-node-wasm.js passed: set backend: wasm
2022-10-17 10:36:01 [35mSTATE:[39m test-backend-node-wasm.js tensors 1785
2022-10-17 10:36:01 [35mSTATE:[39m test-backend-node-wasm.js passed: load models
2022-10-17 10:36:01 [35mSTATE:[39m test-backend-node-wasm.js result: defined models: 25 loaded models: 11
2022-10-17 10:36:01 [35mSTATE:[39m test-backend-node-wasm.js passed: warmup: none default
2022-10-17 10:36:01 [32mDATA: [39m test-backend-node-wasm.js result: face: 0 body: 0 hand: 0 gesture: 0 object: 0 person: 0 {} {} {}
2022-10-17 10:36:01 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: null
2022-10-17 10:36:01 [35mSTATE:[39m test-backend-node-wasm.js passed: warmup none result match
2022-10-17 10:36:01 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:01 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:01 [35mSTATE:[39m test-backend-node-wasm.js event: warmup
2022-10-17 10:36:01 [35mSTATE:[39m test-backend-node-wasm.js passed: warmup: face default
2022-10-17 10:36:01 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 6 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":3}
2022-10-17 10:36:01 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 532
2022-10-17 10:36:01 [35mSTATE:[39m test-backend-node-wasm.js passed: warmup face result match
2022-10-17 10:36:01 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js event: warmup
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: warmup: body default
2022-10-17 10:36:02 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2022-10-17 10:36:02 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 376
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: warmup body result match
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js details: {"face":{"boxScore":0.93,"faceScore":1,"age":23.7,"gender":"female","genderScore":0.97},"emotion":[{"score":0.59,"emotion":"angry"},{"score":0.29,"emotion":"fear"}],"body":{"score":0.92,"keypoints":17},"hand":{"boxScore":0.51,"fingerScore":0.73,"keypoints":21},"gestures":[{"face":0,"gesture":"facing right"},{"face":0,"gesture":"mouth 21% open"},{"hand":0,"gesture":"pinky forward"},{"hand":0,"gesture":"palm up"},{"hand":0,"gesture":"open palm"},{"iris":0,"gesture":"looking left"},{"iris":0,"gesture":"looking up"}]}
2022-10-17 10:36:02 [36mINFO: [39m test-backend-node-wasm.js test: details verification
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js start default
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856}
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg default
2022-10-17 10:36:02 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2022-10-17 10:36:02 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 346
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details face length 1
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details face score 1 0.93 1
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details face age/gender 23.7 female 0.97 85.47
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details face arrays 4 478 1024
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details face emotion 2 {"score":0.59,"emotion":"angry"}
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details face anti-spoofing 0.79
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details face liveness 0.83
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details body length 1
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details body 0.92 17 6
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details hand length 1
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details hand 0.51 0.73 point
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details hand arrays 21 5 7
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details gesture length 7
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details gesture first {"face":0,"gesture":"facing right"}
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details object length 1
2022-10-17 10:36:02 [35mSTATE:[39m test-backend-node-wasm.js passed: details object 0.72 person
2022-10-17 10:36:03 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1413675264}
2022-10-17 10:36:03 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:03 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:03 [35mSTATE:[39m test-backend-node-wasm.js passed: tensor shape: [1,1200,1200,4] dtype: float32
2022-10-17 10:36:03 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1200,1200,4] {"checksum":1413675264}
2022-10-17 10:36:03 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:03 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:03 [35mSTATE:[39m test-backend-node-wasm.js passed: tensor shape: [1200,1200,4] dtype: float32
2022-10-17 10:36:04 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856}
2022-10-17 10:36:04 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:04 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:04 [35mSTATE:[39m test-backend-node-wasm.js passed: tensor shape: [1,1200,1200,3] dtype: float32
2022-10-17 10:36:04 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1200,1200,3] {"checksum":1038921856}
2022-10-17 10:36:04 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:05 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:05 [35mSTATE:[39m test-backend-node-wasm.js passed: tensor shape: [1200,1200,3] dtype: float32
2022-10-17 10:36:05 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996871}
2022-10-17 10:36:05 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:05 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:05 [35mSTATE:[39m test-backend-node-wasm.js passed: tensor shape: [1,1200,1200,4] dtype: int32
2022-10-17 10:36:05 [36mINFO: [39m test-backend-node-wasm.js test default
2022-10-17 10:36:05 [35mSTATE:[39m test-backend-node-wasm.js start async
2022-10-17 10:36:05 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856}
2022-10-17 10:36:05 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg async
2022-10-17 10:36:06 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 1 person: 1 {"score":1,"age":29.6,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2022-10-17 10:36:06 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 326
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js passed: default result face match 1 female 0.97
2022-10-17 10:36:06 [36mINFO: [39m test-backend-node-wasm.js test sync
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js start sync
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856}
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg sync
2022-10-17 10:36:06 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 1 person: 1 {"score":1,"age":29.6,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2022-10-17 10:36:06 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 333
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js passed: default sync 1 female 0.97
2022-10-17 10:36:06 [36mINFO: [39m test-backend-node-wasm.js test: image process
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34697856}
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js passed: image input null [1,256,256,3]
2022-10-17 10:36:06 [36mINFO: [39m test-backend-node-wasm.js test: image null
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js passed: invalid input could not convert input to tensor
2022-10-17 10:36:06 [36mINFO: [39m test-backend-node-wasm.js test face similarity
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js start face similarity
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34697856}
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js passed: detect: samples/in/ai-face.jpg face similarity
2022-10-17 10:36:06 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 6 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":3}
2022-10-17 10:36:06 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 311
2022-10-17 10:36:06 [35mSTATE:[39m test-backend-node-wasm.js start face similarity
2022-10-17 10:36:07 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856}
2022-10-17 10:36:07 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:07 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:07 [35mSTATE:[39m test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg face similarity
2022-10-17 10:36:07 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 1 person: 1 {"score":1,"age":29.6,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2022-10-17 10:36:07 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 339
2022-10-17 10:36:07 [35mSTATE:[39m test-backend-node-wasm.js start face similarity
2022-10-17 10:36:07 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-upper.jpg [1,720,688,3] {"checksum":151155104}
2022-10-17 10:36:07 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:07 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:07 [35mSTATE:[39m test-backend-node-wasm.js passed: detect: samples/in/ai-upper.jpg face similarity
2022-10-17 10:36:07 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.75,"keypoints":7}
2022-10-17 10:36:07 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 297
2022-10-17 10:36:07 [35mSTATE:[39m test-backend-node-wasm.js passed: face descriptor
2022-10-17 10:36:07 [35mSTATE:[39m test-backend-node-wasm.js passed: face similarity {"similarity":[1,0.5266119940661309,0.4858842904087851],"descriptors":[1024,1024,1024]}
2022-10-17 10:36:07 [36mINFO: [39m test-backend-node-wasm.js test object
2022-10-17 10:36:07 [35mSTATE:[39m test-backend-node-wasm.js start object
2022-10-17 10:36:08 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856}
2022-10-17 10:36:08 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:08 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:08 [35mSTATE:[39m test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg object
2022-10-17 10:36:08 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 1 person: 1 {"score":1,"age":29.6,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2022-10-17 10:36:08 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 326
2022-10-17 10:36:08 [35mSTATE:[39m test-backend-node-wasm.js passed: centernet
2022-10-17 10:36:08 [35mSTATE:[39m test-backend-node-wasm.js start object
2022-10-17 10:36:09 [33mWARN: [39m test-backend-node-wasm.js missing kernel ops {"title":"object","model":"nanodet","url":"https://vladmandic.github.io/human-models/models/nanodet.json","missing":["sparsetodense"],"backkend":"wasm"}
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856}
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg object
2022-10-17 10:36:09 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.92,"keypoints":17}
2022-10-17 10:36:09 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 217
2022-10-17 10:36:09 [31mERROR:[39m test-backend-node-wasm.js failed: nanodet []
2022-10-17 10:36:09 [36mINFO: [39m test-backend-node-wasm.js test sensitive
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js start sensitive
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856}
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg sensitive
2022-10-17 10:36:09 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.92,"keypoints":17}
2022-10-17 10:36:09 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 243
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js passed: sensitive result match
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js passed: sensitive face result match
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js passed: sensitive face emotion result [{"score":0.46,"emotion":"neutral"},{"score":0.24,"emotion":"fear"},{"score":0.17,"emotion":"sad"}]
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js passed: sensitive body result match
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js passed: sensitive hand result match
2022-10-17 10:36:09 [36mINFO: [39m test-backend-node-wasm.js test body
2022-10-17 10:36:09 [35mSTATE:[39m test-backend-node-wasm.js start blazepose
2022-10-17 10:36:11 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856}
2022-10-17 10:36:11 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:12 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:12 [35mSTATE:[39m test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg blazepose
2022-10-17 10:36:12 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.99,"keypoints":39}
2022-10-17 10:36:12 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 403
2022-10-17 10:36:12 [35mSTATE:[39m test-backend-node-wasm.js passed: blazepose
2022-10-17 10:36:12 [35mSTATE:[39m test-backend-node-wasm.js start efficientpose
2022-10-17 10:36:12 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856}
2022-10-17 10:36:12 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:13 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:13 [35mSTATE:[39m test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg efficientpose
2022-10-17 10:36:13 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.75,"keypoints":13}
2022-10-17 10:36:13 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 656
2022-10-17 10:36:13 [35mSTATE:[39m test-backend-node-wasm.js passed: efficientpose
2022-10-17 10:36:13 [35mSTATE:[39m test-backend-node-wasm.js start posenet
2022-10-17 10:36:14 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856}
2022-10-17 10:36:14 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:14 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:14 [35mSTATE:[39m test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg posenet
2022-10-17 10:36:14 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.96,"keypoints":16}
2022-10-17 10:36:14 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 290
2022-10-17 10:36:14 [35mSTATE:[39m test-backend-node-wasm.js passed: posenet
2022-10-17 10:36:14 [35mSTATE:[39m test-backend-node-wasm.js start movenet
2022-10-17 10:36:14 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856}
2022-10-17 10:36:14 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:14 [35mSTATE:[39m test-backend-node-wasm.js event: detect
2022-10-17 10:36:14 [35mSTATE:[39m test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg movenet
2022-10-17 10:36:14 [32mDATA: [39m test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.92,"keypoints":17}
2022-10-17 10:36:14 [32mDATA: [39m test-backend-node-wasm.js result: performance: load: null total: 236
2022-10-17 10:36:14 [35mSTATE:[39m test-backend-node-wasm.js passed: movenet
2022-10-17 10:36:14 [36mINFO: [39m test-backend-node-wasm.js test face matching
2022-10-17 10:36:14 [35mSTATE:[39m test-backend-node-wasm.js passed: face database 40
2022-10-17 10:36:14 [35mSTATE:[39m test-backend-node-wasm.js passed: face match {"first":{"index":4,"similarity":0.7827852754786533}} {"second":{"index":4,"similarity":0.5660821189104794}} {"third":{"index":4,"similarity":0.45074189882665594}}
2022-10-17 10:36:14 [36mINFO: [39m test-backend-node-wasm.js test face similarity alternative
2022-10-17 10:36:14 [35mSTATE:[39m test-backend-node-wasm.js start face embeddings
2022-10-17 10:36:15 [35mSTATE:[39m test-backend-node-wasm.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34697856}
2022-10-17 10:36:15 [35mSTATE:[39m test-backend-node-wasm.js event: image
2022-10-17 10:36:15 [31mERROR:[39m test-backend-node-wasm.js failed: testDetect face embeddings
2022-10-17 10:36:15 [31mERROR:[39m test-backend-node-wasm.js uncaughtException {"name":"TypeError","message":"Cannot read properties of undefined (reading 'img_inputs')","stack":["TypeError: Cannot read properties of undefined (reading 'img_inputs')"," at /home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30706:69"," at Array.reduce (<anonymous>)"," at GraphModel.normalizeInputs (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30705:32)"," at GraphModel.execute (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30777:23)"," at /home/vlado/dev/human/dist/human.node-wasm.js:99:175358"," at new Promise (<anonymous>)"," at U5 (/home/vlado/dev/human/dist/human.node-wasm.js:99:175127)"," at k1 (/home/vlado/dev/human/dist/human.node-wasm.js:111:11475)"," at process.processTicksAndRejections (node:internal/process/task_queues:95:5)"," at async /home/vlado/dev/human/dist/human.node-wasm.js:830:8854"]}
2022-10-17 10:36:15 [35mSTATE:[39m all tests complete
2022-10-17 10:36:15 [36mINFO: [39m status {"test":"../demo/nodejs/node.js","passed":1,"failed":0}
2022-10-17 10:36:15 [36mINFO: [39m status {"test":"../demo/nodejs/node-simple.js","passed":1,"failed":0}
2022-10-17 10:36:15 [36mINFO: [39m status {"test":"../demo/nodejs/node-event.js","passed":1,"failed":0}
2022-10-17 10:36:15 [36mINFO: [39m status {"test":"../demo/nodejs/node-similarity.js","passed":1,"failed":0}
2022-10-17 10:36:15 [36mINFO: [39m status {"test":"../demo/nodejs/node-canvas.js","passed":1,"failed":0}
2022-10-17 10:36:15 [36mINFO: [39m status {"test":"../demo/nodejs/process-folder.js","passed":1,"failed":0}
2022-10-17 10:36:15 [36mINFO: [39m status {"test":"../demo/multithread/node-multiprocess.js","passed":1,"failed":0}
2022-10-17 10:36:15 [36mINFO: [39m status {"test":"../demo/facematch/node-match.js","passed":1,"failed":0}
2022-10-17 10:36:15 [36mINFO: [39m status {"test":"test-node-load.js","passed":1,"failed":0}
2022-10-17 10:36:15 [36mINFO: [39m status {"test":"test-node-gear.js","passed":3,"failed":0}
2022-10-17 10:36:15 [36mINFO: [39m status {"test":"test-backend-node.js","passed":85,"failed":1}
2022-10-17 10:36:15 [36mINFO: [39m status {"test":"test-backend-node-gpu.js","passed":85,"failed":1}
2022-10-17 10:36:15 [36mINFO: [39m status {"test":"test-backend-node-wasm.js","passed":85,"failed":2}
2022-10-17 10:36:15 [36mINFO: [39m failures {"count":4}
2022-10-17 10:36:15 [33mWARN: [39m failed {"test":"test-backend-node.js","message":["error",["failed:","testDetect face embeddings"]]}
2022-10-17 10:36:15 [33mWARN: [39m failed {"test":"test-backend-node-gpu.js","message":["error",["failed:","testDetect face embeddings"]]}
2022-10-17 10:36:15 [33mWARN: [39m failed {"test":"test-backend-node-wasm.js","message":["error",["failed: nanodet",[]]]}
2022-10-17 10:36:15 [33mWARN: [39m failed {"test":"test-backend-node-wasm.js","message":["error",["failed:","testDetect face embeddings"]]}
2022-10-25 17:03:39 [36mINFO: [39m @vladmandic/human version 3.0.0
2022-10-25 17:03:39 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v18.10.0
2022-10-25 17:03:39 [36mINFO: [39m demos: [{"cmd":"../demo/nodejs/node.js","args":[]},{"cmd":"../demo/nodejs/node-simple.js","args":[]},{"cmd":"../demo/nodejs/node-event.js","args":["samples/in/ai-body.jpg"]},{"cmd":"../demo/nodejs/node-similarity.js","args":["samples/in/ai-face.jpg","samples/in/ai-upper.jpg"]},{"cmd":"../demo/nodejs/node-canvas.js","args":["samples/in/ai-body.jpg","samples/out/ai-body.jpg"]},{"cmd":"../demo/nodejs/process-folder.js","args":["samples"]},{"cmd":"../demo/multithread/node-multiprocess.js","args":[]},{"cmd":"../demo/facematch/node-match.js","args":[]}]
2022-10-25 17:03:39 [36mINFO: [39m {"cmd":"../demo/nodejs/node.js","args":[]} start
2022-10-25 17:03:40 [36mINFO: [39m {"cmd":"../demo/nodejs/node-simple.js","args":[]} start
2022-10-25 17:03:41 [36mINFO: [39m {"cmd":"../demo/nodejs/node-event.js","args":["samples/in/ai-body.jpg"]} start
2022-10-25 17:03:41 [36mINFO: [39m {"cmd":"../demo/nodejs/node-similarity.js","args":["samples/in/ai-face.jpg","samples/in/ai-upper.jpg"]} start