mirror of https://github.com/vladmandic/human
enable webgl uniform support for faster warmup
parent cddcc91953
commit 303ffe0012
@@ -1,6 +1,6 @@
# @vladmandic/human

Version: **2.1.1**
Version: **2.1.2**

Description: **Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition**

Author: **Vladimir Mandic <mandic00@live.com>**

@@ -9,8 +9,12 @@ Repository: **<git+https://github.com/vladmandic/human.git>**

## Changelog

### **HEAD -> main** 2021/07/29 mandic00@live.com

### **2.1.2** 2021/07/29 mandic00@live.com

### **origin/main** 2021/07/29 mandic00@live.com

- fix typo
- rebuild new release

### **2.1.1** 2021/07/29 mandic00@live.com
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -11325,7 +11325,7 @@ lBhEMohlFerLlBjEMohMVTEARDKCITsAk2AEgAAAkAAAAAAAAAAAAAAAAAAAAAAAASAAAAAAAAD/
2Q==`;

// package.json
var version = "2.1.1";
var version = "2.1.2";

// src/human.ts
var _numTensors, _analyzeMemoryLeaks, _checkSanity, _firstRun, _lastInputSum, _lastCacheDiff, _sanity, _checkBackend, _skipFrame, _warmupBitmap, _warmupCanvas, _warmupNode;

@@ -11401,7 +11401,8 @@ var Human = class {
if (this.tf.getBackend() === "webgl" || this.tf.getBackend() === "humangl") {
this.tf.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
this.tf.ENV.set("WEBGL_CPU_FORWARD", true);
this.tf.ENV.set("WEBGL_PACK_DEPTHWISECONV", true);
this.tf.ENV.set("WEBGL_PACK_DEPTHWISECONV", false);
this.tf.ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true);
if (typeof this.config["deallocate"] !== "undefined" && this.config["deallocate"]) {
log("changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:", true);
this.tf.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", 0);

@@ -11326,7 +11326,7 @@ lBhEMohlFerLlBjEMohMVTEARDKCITsAk2AEgAAAkAAAAAAAAAAAAAAAAAAAAAAAASAAAAAAAAD/
2Q==`;

// package.json
var version = "2.1.1";
var version = "2.1.2";

// src/human.ts
var _numTensors, _analyzeMemoryLeaks, _checkSanity, _firstRun, _lastInputSum, _lastCacheDiff, _sanity, _checkBackend, _skipFrame, _warmupBitmap, _warmupCanvas, _warmupNode;

@@ -11402,7 +11402,8 @@ var Human = class {
if (this.tf.getBackend() === "webgl" || this.tf.getBackend() === "humangl") {
this.tf.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
this.tf.ENV.set("WEBGL_CPU_FORWARD", true);
this.tf.ENV.set("WEBGL_PACK_DEPTHWISECONV", true);
this.tf.ENV.set("WEBGL_PACK_DEPTHWISECONV", false);
this.tf.ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true);
if (typeof this.config["deallocate"] !== "undefined" && this.config["deallocate"]) {
log("changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:", true);
this.tf.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", 0);

@@ -11325,7 +11325,7 @@ lBhEMohlFerLlBjEMohMVTEARDKCITsAk2AEgAAAkAAAAAAAAAAAAAAAAAAAAAAAASAAAAAAAAD/
2Q==`;

// package.json
var version = "2.1.1";
var version = "2.1.2";

// src/human.ts
var _numTensors, _analyzeMemoryLeaks, _checkSanity, _firstRun, _lastInputSum, _lastCacheDiff, _sanity, _checkBackend, _skipFrame, _warmupBitmap, _warmupCanvas, _warmupNode;

@@ -11401,7 +11401,8 @@ var Human = class {
if (this.tf.getBackend() === "webgl" || this.tf.getBackend() === "humangl") {
this.tf.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
this.tf.ENV.set("WEBGL_CPU_FORWARD", true);
this.tf.ENV.set("WEBGL_PACK_DEPTHWISECONV", true);
this.tf.ENV.set("WEBGL_PACK_DEPTHWISECONV", false);
this.tf.ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true);
if (typeof this.config["deallocate"] !== "undefined" && this.config["deallocate"]) {
log("changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:", true);
this.tf.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", 0);
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
@@ -146,7 +146,7 @@ const targets = {
entryPoints: ['tfjs/tf-browser.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['fs', 'buffer', 'util', 'os'],
treeShaking: 'ignore-annotations',
// treeShaking: 'ignore-annotations',
sourcemap: true,
minifyWhitespace: false,
minifyIdentifiers: false,
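The build-script hunk above drops the `treeShaking: 'ignore-annotations'` override from the tfjs browser-bundle target. A rough sketch of the resulting esbuild call for that target, assuming esbuild 0.12.x; `bundle` and `format` are assumptions not shown in the hunk:

```ts
import { build } from 'esbuild';

// Bundle tfjs/tf-browser.ts into dist/tfjs.esm.js without the tree-shaking override.
async function buildTfjsTarget(): Promise<void> {
  await build({
    entryPoints: ['tfjs/tf-browser.ts'],
    outfile: 'dist/tfjs.esm.js',
    external: ['fs', 'buffer', 'util', 'os'],
    bundle: true,            // assumption: targets in this build script are bundled
    format: 'esm',           // assumption: the output is an ESM bundle
    sourcemap: true,
    minifyWhitespace: false,
    minifyIdentifiers: false,
  });
}

buildTfjsTarget().catch((err) => { console.error(err); process.exit(1); });
```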
@@ -1,22 +1,22 @@
2021-07-29 12:48:55 INFO:  @vladmandic/human version 2.1.1
2021-07-29 12:48:55 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-07-29 12:48:55 INFO:  Toolchain: {"tfjs":"3.8.0","esbuild":"0.12.16","typescript":"4.3.5","typedoc":"0.21.4","eslint":"7.31.0"}
2021-07-29 12:48:55 INFO:  Clean: ["dist/*","types/*","typedoc/*"]
2021-07-29 12:48:55 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-07-29 12:48:55 STATE: target: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1303,"outputFiles":"dist/tfjs.esm.js"}
2021-07-29 12:48:55 STATE: target: node type: node: {"imports":42,"importBytes":433972,"outputBytes":377240,"outputFiles":"dist/human.node.js"}
2021-07-29 12:48:55 STATE: target: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1311,"outputFiles":"dist/tfjs.esm.js"}
2021-07-29 12:48:55 STATE: target: nodeGPU type: node: {"imports":42,"importBytes":433980,"outputBytes":377244,"outputFiles":"dist/human.node-gpu.js"}
2021-07-29 12:48:55 STATE: target: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1378,"outputFiles":"dist/tfjs.esm.js"}
2021-07-29 12:48:55 STATE: target: nodeWASM type: node: {"imports":42,"importBytes":434047,"outputBytes":377316,"outputFiles":"dist/human.node-wasm.js"}
2021-07-29 12:48:55 STATE: target: browserNoBundle type: tfjs: {"imports":1,"importBytes":2817,"outputBytes":1214,"outputFiles":"dist/tfjs.esm.js"}
2021-07-29 12:48:55 STATE: target: browserNoBundle type: esm: {"imports":42,"importBytes":433883,"outputBytes":247685,"outputFiles":"dist/human.esm-nobundle.js"}
2021-07-29 12:48:56 STATE: target: browserBundle type: tfjs: {"modules":1552,"moduleBytes":5630928,"imports":7,"importBytes":2817,"outputBytes":2817976,"outputFiles":"dist/tfjs.esm.js"}
2021-07-29 12:48:56 STATE: target: browserBundle type: iife: {"imports":42,"importBytes":3250645,"outputBytes":1599157,"outputFiles":"dist/human.js"}
2021-07-29 12:48:57 STATE: target: browserBundle type: esm: {"imports":42,"importBytes":3250645,"outputBytes":1599149,"outputFiles":"dist/human.esm.js"}
2021-07-29 12:48:57 INFO:  Running Linter: ["server/","src/","tfjs/","test/","demo/"]
2021-07-29 12:49:22 INFO:  Linter complete: files: 74 errors: 0 warnings: 0
2021-07-29 12:49:22 INFO:  Generate ChangeLog: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-07-29 12:49:22 INFO:  Generate Typings: ["src/human.ts"] outDir: ["types"]
2021-07-29 12:49:37 INFO:  Generate TypeDocs: ["src/human.ts"] outDir: ["typedoc"]
2021-07-29 12:49:53 INFO:  Documentation generated at /home/vlado/dev/human/typedoc 1
2021-07-29 16:34:07 INFO:  @vladmandic/human version 2.1.2
2021-07-29 16:34:07 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-07-29 16:34:07 INFO:  Toolchain: {"tfjs":"3.8.0","esbuild":"0.12.17","typescript":"4.3.5","typedoc":"0.21.4","eslint":"7.31.0"}
2021-07-29 16:34:07 INFO:  Clean: ["dist/*","types/*","typedoc/*"]
2021-07-29 16:34:07 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-07-29 16:34:07 STATE: target: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1303,"outputFiles":"dist/tfjs.esm.js"}
2021-07-29 16:34:07 STATE: target: node type: node: {"imports":42,"importBytes":434230,"outputBytes":377710,"outputFiles":"dist/human.node.js"}
2021-07-29 16:34:07 STATE: target: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1311,"outputFiles":"dist/tfjs.esm.js"}
2021-07-29 16:34:07 STATE: target: nodeGPU type: node: {"imports":42,"importBytes":434238,"outputBytes":377714,"outputFiles":"dist/human.node-gpu.js"}
2021-07-29 16:34:07 STATE: target: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1378,"outputFiles":"dist/tfjs.esm.js"}
2021-07-29 16:34:08 STATE: target: nodeWASM type: node: {"imports":42,"importBytes":434305,"outputBytes":377786,"outputFiles":"dist/human.node-wasm.js"}
2021-07-29 16:34:08 STATE: target: browserNoBundle type: tfjs: {"imports":1,"importBytes":2111,"outputBytes":1242,"outputFiles":"dist/tfjs.esm.js"}
2021-07-29 16:34:08 STATE: target: browserNoBundle type: esm: {"imports":42,"importBytes":434169,"outputBytes":247939,"outputFiles":"dist/human.esm-nobundle.js"}
2021-07-29 16:34:08 STATE: target: browserBundle type: tfjs: {"modules":1170,"moduleBytes":4145868,"imports":7,"importBytes":2111,"outputBytes":2334701,"outputFiles":"dist/tfjs.esm.js"}
2021-07-29 16:34:09 STATE: target: browserBundle type: iife: {"imports":42,"importBytes":2767628,"outputBytes":1378402,"outputFiles":"dist/human.js"}
2021-07-29 16:34:09 STATE: target: browserBundle type: esm: {"imports":42,"importBytes":2767628,"outputBytes":1378394,"outputFiles":"dist/human.esm.js"}
2021-07-29 16:34:09 INFO:  Running Linter: ["server/","src/","tfjs/","test/","demo/"]
2021-07-29 16:34:32 INFO:  Linter complete: files: 74 errors: 0 warnings: 0
2021-07-29 16:34:33 INFO:  Generate ChangeLog: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-07-29 16:34:33 INFO:  Generate Typings: ["src/human.ts"] outDir: ["types"]
2021-07-29 16:34:46 INFO:  Generate TypeDocs: ["src/human.ts"] outDir: ["typedoc"]
2021-07-29 16:35:00 INFO:  Documentation generated at /home/vlado/dev/human/typedoc 1
@@ -329,7 +329,8 @@ export class Human {
if (this.tf.getBackend() === 'webgl' || this.tf.getBackend() === 'humangl') {
this.tf.ENV.set('CHECK_COMPUTATION_FOR_ERRORS', false);
this.tf.ENV.set('WEBGL_CPU_FORWARD', true);
this.tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
this.tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', false);
this.tf.ENV.set('WEBGL_USE_SHAPES_UNIFORMS', true);
// if (!this.config.object.enabled) this.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true); // safe to use 16bit precision
if (typeof this.config['deallocate'] !== 'undefined' && this.config['deallocate']) { // hidden param
log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', true);
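The src/human.ts hunk above is the substance of this commit: during WebGL/humangl backend setup, Human now disables WEBGL_PACK_DEPTHWISECONV and enables WEBGL_USE_SHAPES_UNIFORMS, so tensor shapes are passed to shaders as uniforms instead of being compiled into shader source, which reduces shader recompilation during warmup. A minimal standalone sketch of the same flag handling, assuming @tensorflow/tfjs with its WebGL backend; the trySetFlag helper is illustrative, and each flag is applied only if the running tfjs build registers it:

```ts
import * as tf from '@tensorflow/tfjs';

// tf.env().set() throws for flags the current tfjs build does not register, so guard it.
function trySetFlag(name: string, value: boolean | number): void {
  try { tf.env().set(name, value); } catch { /* flag not registered in this tfjs build */ }
}

// Mirrors the WebGL tuning shown in the hunk; `deallocate` mirrors the hidden config['deallocate'] option.
async function configureWebGL(deallocate = false): Promise<void> {
  await tf.setBackend('webgl');
  await tf.ready();
  trySetFlag('CHECK_COMPUTATION_FOR_ERRORS', false);   // skip per-op numeric checks
  trySetFlag('WEBGL_CPU_FORWARD', true);               // let tiny ops run on cpu
  trySetFlag('WEBGL_PACK_DEPTHWISECONV', false);       // disabled by this commit
  trySetFlag('WEBGL_USE_SHAPES_UNIFORMS', true);       // shapes as uniforms -> fewer shader recompiles
  if (deallocate) trySetFlag('WEBGL_DELETE_TEXTURE_THRESHOLD', 0); // release textures immediately
}

configureWebGL().then(() => console.log('webgl flags applied'));
```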
test/test.log (240 changed lines)
@@ -1,120 +1,120 @@
2021-07-29 10:01:36 INFO:  @vladmandic/human version 2.0.3
2021-07-29 10:01:36 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.5.0
2021-07-29 10:01:36 INFO:  tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
2021-07-29 10:01:36 INFO:  test-node.js start
2021-07-29 10:01:36 STATE: test-node.js passed: create human
2021-07-29 10:01:36 INFO:  test-node.js human version: 2.0.3
2021-07-29 10:01:36 INFO:  test-node.js platform: linux x64 agent: NodeJS v16.5.0
2021-07-29 10:01:36 INFO:  test-node.js tfjs version: 3.8.0
2021-07-29 10:01:36 STATE: test-node.js passed: set backend: tensorflow
2021-07-29 10:01:36 STATE: test-node.js passed: load models
2021-07-29 10:01:36 STATE: test-node.js result: defined models: 14 loaded models: 7
2021-07-29 10:01:36 STATE: test-node.js passed: warmup: none default
2021-07-29 10:01:38 STATE: test-node.js passed: warmup: face default
2021-07-29 10:01:38 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
2021-07-29 10:01:38 DATA:  test-node.js result: performance: load: 381 total: 1321
2021-07-29 10:01:39 STATE: test-node.js passed: warmup: body default
2021-07-29 10:01:39 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 10:01:39 DATA:  test-node.js result: performance: load: 381 total: 1085
2021-07-29 10:01:39 INFO:  test-node.js test body variants
2021-07-29 10:01:40 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 10:01:40 STATE: test-node.js passed: detect: samples/ai-body.jpg posenet
2021-07-29 10:01:40 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-07-29 10:01:40 DATA:  test-node.js result: performance: load: 381 total: 690
2021-07-29 10:01:41 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 10:01:41 STATE: test-node.js passed: detect: samples/ai-body.jpg movenet
2021-07-29 10:01:41 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 10:01:41 DATA:  test-node.js result: performance: load: 381 total: 190
2021-07-29 10:01:42 STATE: test-node.js passed: detect: random default
2021-07-29 10:01:42 DATA:  test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
2021-07-29 10:01:42 DATA:  test-node.js result: performance: load: 381 total: 595
2021-07-29 10:01:42 INFO:  test-node.js test: first instance
2021-07-29 10:01:42 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-07-29 10:01:43 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-07-29 10:01:43 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
2021-07-29 10:01:43 DATA:  test-node.js result: performance: load: 381 total: 902
2021-07-29 10:01:43 INFO:  test-node.js test: second instance
2021-07-29 10:01:43 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-07-29 10:01:44 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-07-29 10:01:44 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
2021-07-29 10:01:44 DATA:  test-node.js result: performance: load: 3 total: 892
2021-07-29 10:01:44 INFO:  test-node.js test: concurrent
2021-07-29 10:01:44 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-07-29 10:01:44 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-07-29 10:01:45 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 10:01:46 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 10:01:50 STATE: test-node.js passed: detect: samples/ai-face.jpg default
2021-07-29 10:01:50 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-07-29 10:01:50 DATA:  test-node.js result: performance: load: 381 total: 3806
2021-07-29 10:01:50 STATE: test-node.js passed: detect: samples/ai-face.jpg default
2021-07-29 10:01:50 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-07-29 10:01:50 DATA:  test-node.js result: performance: load: 3 total: 3806
2021-07-29 10:01:50 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-07-29 10:01:50 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 10:01:50 DATA:  test-node.js result: performance: load: 381 total: 3806
2021-07-29 10:01:50 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-07-29 10:01:50 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 10:01:50 DATA:  test-node.js result: performance: load: 3 total: 3806
2021-07-29 10:01:50 INFO:  test-node.js test complete: 13727 ms
2021-07-29 10:01:50 INFO:  test-node-gpu.js start
2021-07-29 10:01:50 WARN:  test-node-gpu.js stderr: 2021-07-29 10:01:50.676793: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2021-07-29 10:01:50 WARN:  test-node-gpu.js stderr: 2021-07-29 10:01:50.723534: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2021-07-29 10:01:50 WARN:  test-node-gpu.js stderr: 2021-07-29 10:01:50.723659: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
2021-07-29 10:01:50 STATE: test-node-gpu.js passed: create human
2021-07-29 10:01:50 INFO:  test-node-gpu.js human version: 2.0.3
2021-07-29 10:01:50 INFO:  test-node-gpu.js platform: linux x64 agent: NodeJS v16.5.0
2021-07-29 10:01:50 INFO:  test-node-gpu.js tfjs version: 3.8.0
2021-07-29 10:01:51 STATE: test-node-gpu.js passed: set backend: tensorflow
2021-07-29 10:01:51 STATE: test-node-gpu.js passed: load models
2021-07-29 10:01:51 STATE: test-node-gpu.js result: defined models: 14 loaded models: 7
2021-07-29 10:01:51 STATE: test-node-gpu.js passed: warmup: none default
2021-07-29 10:01:52 STATE: test-node-gpu.js passed: warmup: face default
2021-07-29 10:01:52 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
2021-07-29 10:01:52 DATA:  test-node-gpu.js result: performance: load: 290 total: 1146
2021-07-29 10:01:53 STATE: test-node-gpu.js passed: warmup: body default
2021-07-29 10:01:53 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 10:01:53 DATA:  test-node-gpu.js result: performance: load: 290 total: 1090
2021-07-29 10:01:53 INFO:  test-node-gpu.js test body variants
2021-07-29 10:01:54 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 10:01:54 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg posenet
2021-07-29 10:01:54 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-07-29 10:01:54 DATA:  test-node-gpu.js result: performance: load: 290 total: 736
2021-07-29 10:01:55 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 10:01:55 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg movenet
2021-07-29 10:01:55 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 10:01:55 DATA:  test-node-gpu.js result: performance: load: 290 total: 202
2021-07-29 10:01:56 STATE: test-node-gpu.js passed: detect: random default
2021-07-29 10:01:56 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0,"keypoints":0}
2021-07-29 10:01:56 DATA:  test-node-gpu.js result: performance: load: 290 total: 169
2021-07-29 10:01:56 INFO:  test-node-gpu.js test: first instance
2021-07-29 10:01:56 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-07-29 10:01:56 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-07-29 10:01:56 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0.78,"keypoints":7}
2021-07-29 10:01:56 DATA:  test-node-gpu.js result: performance: load: 290 total: 123
2021-07-29 10:01:56 INFO:  test-node-gpu.js test: second instance
2021-07-29 10:01:57 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-07-29 10:01:57 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-07-29 10:01:57 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
2021-07-29 10:01:57 DATA:  test-node-gpu.js result: performance: load: 2 total: 842
2021-07-29 10:01:57 INFO:  test-node-gpu.js test: concurrent
2021-07-29 10:01:57 STATE: test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-07-29 10:01:57 STATE: test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-07-29 10:01:58 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 10:01:59 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 10:02:03 STATE: test-node-gpu.js passed: detect: samples/ai-face.jpg default
2021-07-29 10:02:03 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-07-29 10:02:03 DATA:  test-node-gpu.js result: performance: load: 290 total: 3784
2021-07-29 10:02:03 STATE: test-node-gpu.js passed: detect: samples/ai-face.jpg default
2021-07-29 10:02:03 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-07-29 10:02:03 DATA:  test-node-gpu.js result: performance: load: 2 total: 3784
2021-07-29 10:02:03 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
2021-07-29 10:02:03 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 10:02:03 DATA:  test-node-gpu.js result: performance: load: 290 total: 3784
2021-07-29 10:02:03 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
2021-07-29 10:02:03 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 10:02:03 DATA:  test-node-gpu.js result: performance: load: 2 total: 3784
2021-07-29 10:02:03 INFO:  test-node-gpu.js test complete: 12649 ms
2021-07-29 10:02:03 INFO:  test-node-wasm.js start
2021-07-29 10:02:03 ERROR: test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
2021-07-29 10:02:03 ERROR: test-node-wasm.js aborting test
2021-07-29 10:02:03 INFO:  status: {"passed":46,"failed":1}
2021-07-29 16:33:25 INFO:  @vladmandic/human version 2.1.1
2021-07-29 16:33:25 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-07-29 16:33:25 INFO:  tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
2021-07-29 16:33:25 INFO:  test-node.js start
2021-07-29 16:33:26 STATE: test-node.js passed: create human
2021-07-29 16:33:26 INFO:  test-node.js human version: 2.1.1
2021-07-29 16:33:26 INFO:  test-node.js platform: linux x64 agent: NodeJS v16.0.0
2021-07-29 16:33:26 INFO:  test-node.js tfjs version: 3.8.0
2021-07-29 16:33:26 STATE: test-node.js passed: set backend: tensorflow
2021-07-29 16:33:26 STATE: test-node.js passed: load models
2021-07-29 16:33:26 STATE: test-node.js result: defined models: 14 loaded models: 7
2021-07-29 16:33:26 STATE: test-node.js passed: warmup: none default
2021-07-29 16:33:28 STATE: test-node.js passed: warmup: face default
2021-07-29 16:33:28 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
2021-07-29 16:33:28 DATA:  test-node.js result: performance: load: 295 total: 1315
2021-07-29 16:33:29 STATE: test-node.js passed: warmup: body default
2021-07-29 16:33:29 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 16:33:29 DATA:  test-node.js result: performance: load: 295 total: 1065
2021-07-29 16:33:29 INFO:  test-node.js test body variants
2021-07-29 16:33:30 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 16:33:30 STATE: test-node.js passed: detect: samples/ai-body.jpg posenet
2021-07-29 16:33:30 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-07-29 16:33:30 DATA:  test-node.js result: performance: load: 295 total: 716
2021-07-29 16:33:31 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 16:33:31 STATE: test-node.js passed: detect: samples/ai-body.jpg movenet
2021-07-29 16:33:32 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 16:33:32 DATA:  test-node.js result: performance: load: 295 total: 202
2021-07-29 16:33:32 STATE: test-node.js passed: detect: random default
2021-07-29 16:33:32 DATA:  test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
2021-07-29 16:33:32 DATA:  test-node.js result: performance: load: 295 total: 592
2021-07-29 16:33:32 INFO:  test-node.js test: first instance
2021-07-29 16:33:33 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-07-29 16:33:33 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-07-29 16:33:33 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
2021-07-29 16:33:33 DATA:  test-node.js result: performance: load: 295 total: 848
2021-07-29 16:33:33 INFO:  test-node.js test: second instance
2021-07-29 16:33:34 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-07-29 16:33:35 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-07-29 16:33:35 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
2021-07-29 16:33:35 DATA:  test-node.js result: performance: load: 2 total: 825
2021-07-29 16:33:35 INFO:  test-node.js test: concurrent
2021-07-29 16:33:35 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-07-29 16:33:35 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-07-29 16:33:35 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 16:33:36 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 16:33:40 STATE: test-node.js passed: detect: samples/ai-face.jpg default
2021-07-29 16:33:40 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-07-29 16:33:40 DATA:  test-node.js result: performance: load: 295 total: 3744
2021-07-29 16:33:40 STATE: test-node.js passed: detect: samples/ai-face.jpg default
2021-07-29 16:33:40 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-07-29 16:33:40 DATA:  test-node.js result: performance: load: 2 total: 3744
2021-07-29 16:33:40 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-07-29 16:33:40 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 16:33:40 DATA:  test-node.js result: performance: load: 295 total: 3744
2021-07-29 16:33:40 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-07-29 16:33:40 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 16:33:40 DATA:  test-node.js result: performance: load: 2 total: 3744
2021-07-29 16:33:40 INFO:  test-node.js test complete: 13942 ms
2021-07-29 16:33:40 INFO:  test-node-gpu.js start
2021-07-29 16:33:41 WARN:  test-node-gpu.js stderr: 2021-07-29 16:33:41.667000: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2021-07-29 16:33:41 WARN:  test-node-gpu.js stderr: 2021-07-29 16:33:41.954020: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2021-07-29 16:33:41 WARN:  test-node-gpu.js stderr: 2021-07-29 16:33:41.954055: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
2021-07-29 16:33:42 STATE: test-node-gpu.js passed: create human
2021-07-29 16:33:42 INFO:  test-node-gpu.js human version: 2.1.1
2021-07-29 16:33:42 INFO:  test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
2021-07-29 16:33:42 INFO:  test-node-gpu.js tfjs version: 3.8.0
2021-07-29 16:33:42 STATE: test-node-gpu.js passed: set backend: tensorflow
2021-07-29 16:33:42 STATE: test-node-gpu.js passed: load models
2021-07-29 16:33:42 STATE: test-node-gpu.js result: defined models: 14 loaded models: 7
2021-07-29 16:33:42 STATE: test-node-gpu.js passed: warmup: none default
2021-07-29 16:33:43 STATE: test-node-gpu.js passed: warmup: face default
2021-07-29 16:33:43 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
2021-07-29 16:33:43 DATA:  test-node-gpu.js result: performance: load: 280 total: 1365
2021-07-29 16:33:44 STATE: test-node-gpu.js passed: warmup: body default
2021-07-29 16:33:44 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 16:33:44 DATA:  test-node-gpu.js result: performance: load: 280 total: 1061
2021-07-29 16:33:44 INFO:  test-node-gpu.js test body variants
2021-07-29 16:33:45 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 16:33:46 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg posenet
2021-07-29 16:33:46 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-07-29 16:33:46 DATA:  test-node-gpu.js result: performance: load: 280 total: 729
2021-07-29 16:33:47 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 16:33:47 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg movenet
2021-07-29 16:33:47 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 16:33:47 DATA:  test-node-gpu.js result: performance: load: 280 total: 199
2021-07-29 16:33:47 STATE: test-node-gpu.js passed: detect: random default
2021-07-29 16:33:47 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0,"keypoints":0}
2021-07-29 16:33:47 DATA:  test-node-gpu.js result: performance: load: 280 total: 172
2021-07-29 16:33:47 INFO:  test-node-gpu.js test: first instance
2021-07-29 16:33:48 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-07-29 16:33:48 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-07-29 16:33:48 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0.78,"keypoints":7}
2021-07-29 16:33:48 DATA:  test-node-gpu.js result: performance: load: 280 total: 146
2021-07-29 16:33:48 INFO:  test-node-gpu.js test: second instance
2021-07-29 16:33:48 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-07-29 16:33:49 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-07-29 16:33:49 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
2021-07-29 16:33:49 DATA:  test-node-gpu.js result: performance: load: 7 total: 905
2021-07-29 16:33:49 INFO:  test-node-gpu.js test: concurrent
2021-07-29 16:33:49 STATE: test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-07-29 16:33:49 STATE: test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-07-29 16:33:50 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 16:33:51 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-07-29 16:33:55 STATE: test-node-gpu.js passed: detect: samples/ai-face.jpg default
2021-07-29 16:33:55 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-07-29 16:33:55 DATA:  test-node-gpu.js result: performance: load: 280 total: 3895
2021-07-29 16:33:55 STATE: test-node-gpu.js passed: detect: samples/ai-face.jpg default
2021-07-29 16:33:55 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-07-29 16:33:55 DATA:  test-node-gpu.js result: performance: load: 7 total: 3895
2021-07-29 16:33:55 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
2021-07-29 16:33:55 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 16:33:55 DATA:  test-node-gpu.js result: performance: load: 280 total: 3895
2021-07-29 16:33:55 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
2021-07-29 16:33:55 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-07-29 16:33:55 DATA:  test-node-gpu.js result: performance: load: 7 total: 3895
2021-07-29 16:33:55 INFO:  test-node-gpu.js test complete: 13148 ms
2021-07-29 16:33:55 INFO:  test-node-wasm.js start
2021-07-29 16:33:55 ERROR: test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
2021-07-29 16:33:55 ERROR: test-node-wasm.js aborting test
2021-07-29 16:33:55 INFO:  status: {"passed":46,"failed":1}
@@ -3,29 +3,6 @@
 * @external
 */

// import from dist
// get versions of all packages
/*
import * as packageBundle from '@tensorflow/tfjs/package.json';
import * as packageCore from '@tensorflow/tfjs-core/package.json';
import * as packageData from '@tensorflow/tfjs-data/package.json';
import * as packageLayers from '@tensorflow/tfjs-layers/package.json';
import * as packageConverter from '@tensorflow/tfjs-converter/package.json';
// for backends, get version from source to avoid incorrect tree shaking
import { version_cpu } from '@tensorflow/tfjs-backend-cpu/dist/index.js';
import { version_webgl } from '@tensorflow/tfjs-backend-webgl/dist/index.js';
import { version_wasm } from '@tensorflow/tfjs-backend-wasm/dist/index.js';

// export all
export * from '@tensorflow/tfjs-core/dist/index.js';
export * from '@tensorflow/tfjs-layers/dist/index.js';
export * from '@tensorflow/tfjs-converter/dist/index.js';
export * as data from '@tensorflow/tfjs-data/dist/index.js';
export * from '@tensorflow/tfjs-backend-cpu/dist/index.js';
export * from '@tensorflow/tfjs-backend-webgl/dist/index.js';
export * from '@tensorflow/tfjs-backend-wasm/dist/index.js';
*/

// import from src
// get versions of all packages
import { version as tfjsVersion } from '@tensorflow/tfjs/package.json';

@@ -39,6 +16,7 @@ import { version as tfjsBackendWASMVersion } from '@tensorflow/tfjs-backend-wasm

// export all
// requires treeShaking:ignore-annotations due to tfjs misconfiguration
/*
export * from '@tensorflow/tfjs-core/src/index';
export * from '@tensorflow/tfjs-layers/src/index';
export * from '@tensorflow/tfjs-converter/src/index';

@@ -46,9 +24,17 @@ export * as data from '@tensorflow/tfjs-data/src/index';
export * from '@tensorflow/tfjs-backend-cpu/src/index';
export * from '@tensorflow/tfjs-backend-webgl/src/index';
export * from '@tensorflow/tfjs-backend-wasm/src/index';
/*
*/

// export all
export * from '@tensorflow/tfjs-core/dist/index.js';
export * from '@tensorflow/tfjs-layers/dist/index.js';
export * from '@tensorflow/tfjs-converter/dist/index.js';
export * as data from '@tensorflow/tfjs-data/dist/index.js';
export * from '@tensorflow/tfjs-backend-cpu/dist/index.js';
export * from '@tensorflow/tfjs-backend-webgl/dist/index.js';
export * from '@tensorflow/tfjs-backend-wasm/dist/index.js';

// export versions
export const version = {
tfjs: tfjsVersion,
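The tfjs/tf-browser.ts rewrite above switches from the commented-out src re-exports to the dist bundles and gathers package versions into a single version export. A trimmed sketch of that pattern, assuming a bundler that can import the version field from package.json files; only a few of the re-exported packages are shown, and the alias names are illustrative:

```ts
// Re-export the TFJS surface from the prebuilt dist bundles and expose one version map.
export * from '@tensorflow/tfjs-core/dist/index.js';
export * from '@tensorflow/tfjs-converter/dist/index.js';
export * from '@tensorflow/tfjs-backend-webgl/dist/index.js';

import { version as tfjsVersion } from '@tensorflow/tfjs/package.json';
import { version as coreVersion } from '@tensorflow/tfjs-core/package.json';
import { version as webglVersion } from '@tensorflow/tfjs-backend-webgl/package.json';

// Consumers (for example Human's diagnostics) can read version.tfjs at runtime.
export const version = {
  tfjs: tfjsVersion,
  'tfjs-core': coreVersion,
  'tfjs-backend-webgl': webglVersion,
};
```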
@@ -354,7 +354,7 @@ export var CustomCallback: {
};
};
export var DataStorage: {
new (backend3: any, dataMover: any): {
new (backend2: any, dataMover: any): {
backend: any;
dataMover: any;
data: WeakMap<object, any>;

@@ -384,7 +384,7 @@ export var EarlyStopping: {
verbose: any;
mode: any;
baseline: any;
monitorFunc: typeof less3;
monitorFunc: typeof less2;
onTrainBegin(logs: any): Promise<void>;
wait: number | undefined;
stoppedEpoch: any;

@@ -542,8 +542,8 @@ export var GPGPUContext: {
createPackedMatrixTexture(rows: any, columns: any): any;
deleteMatrixTexture(texture: any): void;
downloadByteEncodedFloatMatrixFromOutputTexture(texture: any, rows: any, columns: any): any;
downloadPackedMatrixFromBuffer(buffer3: any, batch: any, rows: any, columns: any, physicalRows: any, physicalCols: any): Float32Array;
downloadFloat32MatrixFromBuffer(buffer3: any, size: any): Float32Array;
downloadPackedMatrixFromBuffer(buffer2: any, batch: any, rows: any, columns: any, physicalRows: any, physicalCols: any): Float32Array;
downloadFloat32MatrixFromBuffer(buffer2: any, size: any): Float32Array;
createBufferFromTexture(texture: any, rows: any, columns: any): any;
createAndWaitForFence(): Promise<any>;
createFence(gl: any): {

@@ -649,8 +649,8 @@ export var GraphModel: {
_executeAsync(inputs: any, outputs: any, isFunctionExecution?: boolean, tensorArrayMap?: {}, tensorListMap?: {}): Promise<any>;
executeFunctionAsync(inputs: any, tensorArrayMap: any, tensorListMap: any): Promise<any>;
executeWithControlFlow(inputs: any, context: any, outputNames: any, isFunctionExecution: any): Promise<any>;
processStack(inputNodes: any, stack3: any, context: any, tensorMap: any, added: any, tensorsToKeep: any, outputNames: any, intermediateTensorConsumerCount: any, usedNodes: any): any[];
processChildNodes(node: any, stack3: any, context: any, tensorMap: any, added: any, usedNodes: any): void;
processStack(inputNodes: any, stack2: any, context: any, tensorMap: any, added: any, tensorsToKeep: any, outputNames: any, intermediateTensorConsumerCount: any, usedNodes: any): any[];
processChildNodes(node: any, stack2: any, context: any, tensorMap: any, added: any, usedNodes: any): void;
dispose(): void;
checkInputShapeAndType(inputs: any): void;
mapInputs(inputs: any): {};

@@ -690,8 +690,8 @@ export var GraphModel: {
_executeAsync(inputs: any, outputs: any, isFunctionExecution?: boolean, tensorArrayMap?: {}, tensorListMap?: {}): Promise<any>;
executeFunctionAsync(inputs: any, tensorArrayMap: any, tensorListMap: any): Promise<any>;
executeWithControlFlow(inputs: any, context: any, outputNames: any, isFunctionExecution: any): Promise<any>;
processStack(inputNodes: any, stack3: any, context: any, tensorMap: any, added: any, tensorsToKeep: any, outputNames: any, intermediateTensorConsumerCount: any, usedNodes: any): any[];
processChildNodes(node: any, stack3: any, context: any, tensorMap: any, added: any, usedNodes: any): void;
processStack(inputNodes: any, stack2: any, context: any, tensorMap: any, added: any, tensorsToKeep: any, outputNames: any, intermediateTensorConsumerCount: any, usedNodes: any): any[];
processChildNodes(node: any, stack2: any, context: any, tensorMap: any, added: any, usedNodes: any): void;
dispose(): void;
checkInputShapeAndType(inputs: any): void;
mapInputs(inputs: any): {};

@@ -919,6 +919,7 @@ export var MathBackendCPU: {
epsilon(): number;
timerAvailable(): boolean;
};
nextDataId: number;
};
export var MathBackendWebGL: {
new (gpgpu: any): {

@@ -1018,7 +1019,7 @@ export var MathBackendWebGL: {
shouldExecuteOnCPU(inputs: any, sizeThreshold?: any): any;
getGPGPUContext(): any;
where(condition: any): any;
packedUnaryOp(x: any, op3: any, dtype: any): any;
packedUnaryOp(x: any, op2: any, dtype: any): any;
abs(x: any): any;
makeTensorInfo(shape: any, dtype: any, values: any): {
dataId: {

@@ -1070,6 +1071,7 @@ export var MathBackendWebGL: {
acquireTexture(texShape: any, texType: any, dtype: any, isPacked: any): any;
computeBytes(shape: any, dtype: any): number;
};
nextDataId: number;
};
export var Max: string;
export var MaxPool: string;

@@ -1303,6 +1305,7 @@ export var RNN: {
};
};
fromConfig(cls: any, config: any, customObjects?: {}): any;
className: string;
nodeKey(layer: any, nodeIndex: any): string;
};
export var Range: string;

@@ -1534,7 +1537,7 @@ export function batchToSpaceND(...args: any[]): any;
export namespace batchToSpaceND { }
export function bincount(...args: any[]): any;
export namespace bincount { }
export function booleanMaskAsync(tensor3: any, mask: any, axis: any): Promise<any>;
export function booleanMaskAsync(tensor2: any, mask: any, axis: any): Promise<any>;
export function broadcastTo(...args: any[]): any;
export namespace broadcastTo { }
declare var browser_exports: {};

@@ -1594,7 +1597,7 @@ export function cosineWindow(windowLength: any, a: any, b: any): any;
export function cumsum(...args: any[]): any;
export namespace cumsum { }
export function customGrad(f: any): any;
declare var src_exports: {};
declare var dist_exports: {};
export function denseBincount(...args: any[]): any;
export namespace denseBincount { }
export function deprecationWarn(msg: any): void;

@@ -1771,8 +1774,8 @@ export function loadGraphModel(modelUrl: any, options?: {}): Promise<{
_executeAsync(inputs: any, outputs: any, isFunctionExecution?: boolean, tensorArrayMap?: {}, tensorListMap?: {}): Promise<any>;
executeFunctionAsync(inputs: any, tensorArrayMap: any, tensorListMap: any): Promise<any>;
executeWithControlFlow(inputs: any, context: any, outputNames: any, isFunctionExecution: any): Promise<any>;
processStack(inputNodes: any, stack3: any, context: any, tensorMap: any, added: any, tensorsToKeep: any, outputNames: any, intermediateTensorConsumerCount: any, usedNodes: any): any[];
processChildNodes(node: any, stack3: any, context: any, tensorMap: any, added: any, usedNodes: any): void;
processStack(inputNodes: any, stack2: any, context: any, tensorMap: any, added: any, tensorsToKeep: any, outputNames: any, intermediateTensorConsumerCount: any, usedNodes: any): any[];
processChildNodes(node: any, stack2: any, context: any, tensorMap: any, added: any, usedNodes: any): void;
dispose(): void;
checkInputShapeAndType(inputs: any): void;
mapInputs(inputs: any): {};

@@ -1812,8 +1815,8 @@ export function loadGraphModel(modelUrl: any, options?: {}): Promise<{
_executeAsync(inputs: any, outputs: any, isFunctionExecution?: boolean, tensorArrayMap?: {}, tensorListMap?: {}): Promise<any>;
executeFunctionAsync(inputs: any, tensorArrayMap: any, tensorListMap: any): Promise<any>;
executeWithControlFlow(inputs: any, context: any, outputNames: any, isFunctionExecution: any): Promise<any>;
processStack(inputNodes: any, stack3: any, context: any, tensorMap: any, added: any, tensorsToKeep: any, outputNames: any, intermediateTensorConsumerCount: any, usedNodes: any): any[];
processChildNodes(node: any, stack3: any, context: any, tensorMap: any, added: any, usedNodes: any): void;
processStack(inputNodes: any, stack2: any, context: any, tensorMap: any, added: any, tensorsToKeep: any, outputNames: any, intermediateTensorConsumerCount: any, usedNodes: any): any[];
processChildNodes(node: any, stack2: any, context: any, tensorMap: any, added: any, usedNodes: any): void;
dispose(): void;
checkInputShapeAndType(inputs: any): void;
mapInputs(inputs: any): {};

@@ -2009,7 +2012,7 @@ export function randomNormal(...args: any[]): any;
export namespace randomNormal { }
export function randomUniform(...args: any[]): any;
export namespace randomUniform { }
export function range(start: any, stop: any, step6?: number, dtype?: string): any;
export function range(start: any, stop: any, step5?: number, dtype?: string): any;
export function ready(): any;
export function real(...args: any[]): any;
export namespace real { }

@@ -2191,7 +2194,7 @@ export function variableGrads(f: any, varList: any): {
value: any;
grads: {};
};
declare var version17: {
declare var version16: {
tfjs: string;
"tfjs-core": string;
"tfjs-data": string;

@@ -2201,12 +2204,12 @@ declare var version17: {
"tfjs-backend-webgl": string;
"tfjs-backend-wasm": string;
};
declare var version12: string;
declare var version9: string;
declare var version14: string;
declare var version11: string;
declare var version16: string;
declare var version9: string;
declare var version13: string;
declare var version10: string;
declare var version15: string;
declare var version14: string;
export namespace webgl {
export { forceHalfFloat };
}

@@ -2217,7 +2220,7 @@ export function whereAsync(condition: any): Promise<any>;
export function zeros(shape: any, dtype?: string): any;
export function zerosLike(...args: any[]): any;
export namespace zerosLike { }
declare function less3(currVal: any, prevVal: any): boolean;
declare function less2(currVal: any, prevVal: any): boolean;
declare function getQueryParams(queryString: any): {};
declare function earlyStopping(args: any): {
monitor: any;

@@ -2226,7 +2229,7 @@ declare function earlyStopping(args: any): {
verbose: any;
mode: any;
baseline: any;
monitorFunc: typeof less3;
monitorFunc: typeof less2;
onTrainBegin(logs: any): Promise<void>;
wait: number | undefined;
stoppedEpoch: any;
@@ -2621,4 +2624,4 @@ declare var OptimizerConstructors: {
getClassName(): any;
};
};
export { add2 as add, backend_util_exports as backend_util, browser_exports as browser, exports_constraints_exports as constraints, src_exports as data, device_util_exports as device_util, fused_ops_exports as fused, gather_nd_util_exports as gather_util, gpgpu_util_exports as gpgpu_util, exports_initializers_exports as initializers, io_exports as io, isFinite2 as isFinite, isNaN2 as isNaN, kernel_impls_exports as kernel_impls, exports_layers_exports as layers, math_exports as math, exports_metrics_exports as metrics, exports_models_exports as models, ones2 as ones, print2 as print, exports_regularizers_exports as regularizers, round2 as round, scatter_nd_util_exports as scatter_util, serialization_exports as serialization, shared_exports as shared, slice_util_exports as slice_util, sum2 as sum, tanh2 as tanh, tensor_util_exports as tensor_util, test_util_exports as test_util, util_exports as util, version17 as version, version12 as version_converter, version9 as version_core, version14 as version_cpu, version11 as version_layers, version16 as version_wasm, version15 as version_webgl, webgl_util_exports as webgl_util };
export { add2 as add, backend_util_exports as backend_util, browser_exports as browser, exports_constraints_exports as constraints, dist_exports as data, device_util_exports as device_util, fused_ops_exports as fused, gather_nd_util_exports as gather_util, gpgpu_util_exports as gpgpu_util, exports_initializers_exports as initializers, io_exports as io, isFinite2 as isFinite, isNaN2 as isNaN, kernel_impls_exports as kernel_impls, exports_layers_exports as layers, math_exports as math, exports_metrics_exports as metrics, exports_models_exports as models, ones2 as ones, print2 as print, exports_regularizers_exports as regularizers, round2 as round, scatter_nd_util_exports as scatter_util, serialization_exports as serialization, shared_exports as shared, slice_util_exports as slice_util, sum2 as sum, tanh2 as tanh, tensor_util_exports as tensor_util, test_util_exports as test_util, util_exports as util, version16 as version, version11 as version_converter, version9 as version_core, version13 as version_cpu, version10 as version_layers, version15 as version_wasm, version14 as version_webgl, webgl_util_exports as webgl_util };