switch to tfjs source import

pull/50/head
Vladimir Mandic 2020-11-17 10:18:15 -05:00
parent cad783c6fc
commit 1efbc5c69d
30 changed files with 17513 additions and 4772 deletions

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -30,7 +30,7 @@
"imports": []
},
"dist/human.esm.js": {
"bytes": 1933724,
"bytes": 1837699,
"imports": []
}
},
@ -38,29 +38,29 @@
"dist/demo-browser-index.js.map": {
"imports": [],
"inputs": {},
"bytes": 5414456
"bytes": 4813960
},
"dist/demo-browser-index.js": {
"imports": [],
"exports": [],
"inputs": {
"dist/human.esm.js": {
"bytesInOutput": 1926649
"bytesInOutput": 1830347
},
"demo/draw.js": {
"bytesInOutput": 7341
},
"demo/menu.js": {
"bytesInOutput": 12020
"bytesInOutput": 12028
},
"assets/gl-bench.js": {
"bytesInOutput": 7809
},
"demo/browser.js": {
"bytesInOutput": 17327
"bytesInOutput": 17360
}
},
"bytes": 1978411
"bytes": 1882150
}
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -9,7 +9,7 @@
"imports": []
},
"src/age/age.js": {
"bytes": 1966,
"bytes": 1953,
"imports": [
{
"path": "src/tf.js"
@ -80,7 +80,7 @@
]
},
"src/body/modelPoseNet.js": {
"bytes": 1922,
"bytes": 1913,
"imports": [
{
"path": "src/tf.js"
@ -133,7 +133,7 @@
]
},
"src/embedding/embedding.js": {
"bytes": 2105,
"bytes": 2092,
"imports": [
{
"path": "src/tf.js"
@ -144,7 +144,7 @@
]
},
"src/emotion/emotion.js": {
"bytes": 3004,
"bytes": 2991,
"imports": [
{
"path": "src/tf.js"
@ -155,7 +155,7 @@
]
},
"src/face/blazeface.js": {
"bytes": 6941,
"bytes": 6928,
"imports": [
{
"path": "src/tf.js"
@ -175,7 +175,7 @@
"imports": []
},
"src/face/facemesh.js": {
"bytes": 2471,
"bytes": 2461,
"imports": [
{
"path": "src/tf.js"
@ -213,7 +213,7 @@
"imports": []
},
"src/gender/gender.js": {
"bytes": 3407,
"bytes": 3394,
"imports": [
{
"path": "src/tf.js"
@ -265,7 +265,7 @@
]
},
"src/hand/handpose.js": {
"bytes": 3026,
"bytes": 3020,
"imports": [
{
"path": "src/tf.js"
@ -286,7 +286,7 @@
"imports": []
},
"src/human.js": {
"bytes": 15693,
"bytes": 15690,
"imports": [
{
"path": "src/tf.js"
@ -349,7 +349,7 @@
"imports": []
},
"src/tf.js": {
"bytes": 1178,
"bytes": 200,
"imports": []
}
},
@ -357,7 +357,7 @@
"dist/human.esm-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 670719
"bytes": 669648
},
"dist/human.esm-nobundle.js": {
"imports": [],
@ -366,7 +366,7 @@
],
"inputs": {
"src/face/blazeface.js": {
"bytesInOutput": 5445
"bytesInOutput": 5448
},
"src/face/box.js": {
"bytesInOutput": 1666
@ -381,22 +381,22 @@
"bytesInOutput": 9779
},
"src/face/facemesh.js": {
"bytesInOutput": 1938
"bytesInOutput": 1944
},
"src/profile.js": {
"bytesInOutput": 895
},
"src/age/age.js": {
"bytesInOutput": 1209
"bytesInOutput": 1212
},
"src/gender/gender.js": {
"bytesInOutput": 2029
"bytesInOutput": 2032
},
"src/emotion/emotion.js": {
"bytesInOutput": 1881
"bytesInOutput": 1884
},
"src/embedding/embedding.js": {
"bytesInOutput": 1403
"bytesInOutput": 1404
},
"src/body/modelBase.js": {
"bytesInOutput": 640
@ -426,7 +426,7 @@
"bytesInOutput": 1955
},
"src/body/modelPoseNet.js": {
"bytesInOutput": 1668
"bytesInOutput": 1671
},
"src/body/posenet.js": {
"bytesInOutput": 863
@ -441,7 +441,7 @@
"bytesInOutput": 127037
},
"src/hand/handpose.js": {
"bytesInOutput": 1825
"bytesInOutput": 1831
},
"src/gesture.js": {
"bytesInOutput": 2328
@ -453,10 +453,10 @@
"bytesInOutput": 4061
},
"src/tf.js": {
"bytesInOutput": 163
"bytesInOutput": 115
},
"src/human.js": {
"bytesInOutput": 10583
"bytesInOutput": 10588
},
"src/hand/box.js": {
"bytesInOutput": 1979
@ -471,7 +471,7 @@
"bytesInOutput": 20
}
},
"bytes": 250185
"bytes": 250170
}
}
}

232
dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

8592
dist/human.esm.json vendored

File diff suppressed because it is too large Load Diff

232
dist/human.js vendored

File diff suppressed because one or more lines are too long

6
dist/human.js.map vendored

File diff suppressed because one or more lines are too long

8592
dist/human.json vendored

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -9,7 +9,7 @@
"imports": []
},
"src/age/age.js": {
"bytes": 1966,
"bytes": 1953,
"imports": [
{
"path": "src/tf.js"
@ -80,7 +80,7 @@
]
},
"src/body/modelPoseNet.js": {
"bytes": 1922,
"bytes": 1913,
"imports": [
{
"path": "src/tf.js"
@ -133,7 +133,7 @@
]
},
"src/embedding/embedding.js": {
"bytes": 2105,
"bytes": 2092,
"imports": [
{
"path": "src/tf.js"
@ -144,7 +144,7 @@
]
},
"src/emotion/emotion.js": {
"bytes": 3004,
"bytes": 2991,
"imports": [
{
"path": "src/tf.js"
@ -155,7 +155,7 @@
]
},
"src/face/blazeface.js": {
"bytes": 6941,
"bytes": 6928,
"imports": [
{
"path": "src/tf.js"
@ -175,7 +175,7 @@
"imports": []
},
"src/face/facemesh.js": {
"bytes": 2471,
"bytes": 2461,
"imports": [
{
"path": "src/tf.js"
@ -213,7 +213,7 @@
"imports": []
},
"src/gender/gender.js": {
"bytes": 3407,
"bytes": 3394,
"imports": [
{
"path": "src/tf.js"
@ -265,7 +265,7 @@
]
},
"src/hand/handpose.js": {
"bytes": 3026,
"bytes": 3020,
"imports": [
{
"path": "src/tf.js"
@ -286,7 +286,7 @@
"imports": []
},
"src/human.js": {
"bytes": 15693,
"bytes": 15690,
"imports": [
{
"path": "src/tf.js"
@ -349,7 +349,7 @@
"imports": []
},
"src/tf.js": {
"bytes": 1178,
"bytes": 200,
"imports": []
}
},
@ -357,14 +357,14 @@
"dist/human.node-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 686879
"bytes": 685808
},
"dist/human.node-nobundle.js": {
"imports": [],
"exports": [],
"inputs": {
"src/face/blazeface.js": {
"bytesInOutput": 5449
"bytesInOutput": 5452
},
"src/face/box.js": {
"bytesInOutput": 1673
@ -379,22 +379,22 @@
"bytesInOutput": 9781
},
"src/face/facemesh.js": {
"bytesInOutput": 1942
"bytesInOutput": 1948
},
"src/profile.js": {
"bytesInOutput": 897
},
"src/age/age.js": {
"bytesInOutput": 1212
"bytesInOutput": 1215
},
"src/gender/gender.js": {
"bytesInOutput": 2032
"bytesInOutput": 2035
},
"src/emotion/emotion.js": {
"bytesInOutput": 1884
"bytesInOutput": 1887
},
"src/embedding/embedding.js": {
"bytesInOutput": 1407
"bytesInOutput": 1408
},
"src/body/modelBase.js": {
"bytesInOutput": 642
@ -424,7 +424,7 @@
"bytesInOutput": 1963
},
"src/body/modelPoseNet.js": {
"bytesInOutput": 1671
"bytesInOutput": 1674
},
"src/body/posenet.js": {
"bytesInOutput": 877
@ -439,7 +439,7 @@
"bytesInOutput": 127039
},
"src/hand/handpose.js": {
"bytesInOutput": 1828
"bytesInOutput": 1834
},
"src/gesture.js": {
"bytesInOutput": 2332
@ -454,7 +454,7 @@
"bytesInOutput": 10626
},
"src/tf.js": {
"bytesInOutput": 184
"bytesInOutput": 141
},
"src/hand/box.js": {
"bytesInOutput": 1979
@ -469,7 +469,7 @@
"bytesInOutput": 20
}
},
"bytes": 250498
"bytes": 250483
}
}
}

3936
dist/human.node.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

246
dist/human.node.json vendored
View File

@ -4,6 +4,71 @@
"bytes": 8721,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
"bytes": 297728,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/index.js"
}
]
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/index.js": {
"bytes": 2110,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/alea.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xor128.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xorwow.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xorshift7.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xor4096.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/tychei.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/seedrandom.js"
}
]
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/alea.js": {
"bytes": 3243,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/tychei.js": {
"bytes": 2525,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xor128.js": {
"bytes": 1748,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xor4096.js": {
"bytes": 4559,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xorshift7.js": {
"bytes": 2418,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xorwow.js": {
"bytes": 1919,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/seedrandom.js": {
"bytes": 8358,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-wasm/dist/backend_wasm.js": {
"bytes": 14032,
"imports": [
@ -1135,6 +1200,22 @@
"bytes": 27699,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js": {
"bytes": 607535,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
}
]
},
"node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js": {
"bytes": 312103,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
}
]
},
"node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js": {
"bytes": 1313261,
"imports": [
@ -1143,14 +1224,70 @@
}
]
},
"node_modules/@tensorflow/tfjs/dist/tf.es2017.js": {
"bytes": 3526165,
"node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js": {
"bytes": 217014,
"imports": [
{
"path": "node_modules/node-fetch/lib/index.mjs"
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
}
]
},
"node_modules/@tensorflow/tfjs-layers/dist/tf-layers.node.js": {
"bytes": 811045,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
}
]
},
"node_modules/@tensorflow/tfjs/dist/index.js": {
"bytes": 1785,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-layers/dist/tf-layers.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-layers/dist/tf-layers.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js"
},
{
"path": "node_modules/@tensorflow/tfjs/dist/version.js"
}
]
},
"node_modules/@tensorflow/tfjs/dist/version.js": {
"bytes": 175,
"imports": []
},
"node_modules/node-fetch/lib/index.mjs": {
"bytes": 41097,
"imports": []
@ -1160,7 +1297,7 @@
"imports": []
},
"src/age/age.js": {
"bytes": 1966,
"bytes": 1953,
"imports": [
{
"path": "src/tf.js"
@ -1231,7 +1368,7 @@
]
},
"src/body/modelPoseNet.js": {
"bytes": 1922,
"bytes": 1913,
"imports": [
{
"path": "src/tf.js"
@ -1284,7 +1421,7 @@
]
},
"src/embedding/embedding.js": {
"bytes": 2105,
"bytes": 2092,
"imports": [
{
"path": "src/tf.js"
@ -1295,7 +1432,7 @@
]
},
"src/emotion/emotion.js": {
"bytes": 3004,
"bytes": 2991,
"imports": [
{
"path": "src/tf.js"
@ -1306,7 +1443,7 @@
]
},
"src/face/blazeface.js": {
"bytes": 6941,
"bytes": 6928,
"imports": [
{
"path": "src/tf.js"
@ -1326,7 +1463,7 @@
"imports": []
},
"src/face/facemesh.js": {
"bytes": 2471,
"bytes": 2461,
"imports": [
{
"path": "src/tf.js"
@ -1364,7 +1501,7 @@
"imports": []
},
"src/gender/gender.js": {
"bytes": 3407,
"bytes": 3394,
"imports": [
{
"path": "src/tf.js"
@ -1416,7 +1553,7 @@
]
},
"src/hand/handpose.js": {
"bytes": 3026,
"bytes": 3020,
"imports": [
{
"path": "src/tf.js"
@ -1437,7 +1574,7 @@
"imports": []
},
"src/human.js": {
"bytes": 15693,
"bytes": 15690,
"imports": [
{
"path": "src/tf.js"
@ -1500,10 +1637,10 @@
"imports": []
},
"src/tf.js": {
"bytes": 1178,
"bytes": 200,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js"
"path": "node_modules/@tensorflow/tfjs/dist/index.js"
},
{
"path": "node_modules/@tensorflow/tfjs-backend-wasm/dist/index.js"
@ -1515,7 +1652,7 @@
"dist/human.node.js.map": {
"imports": [],
"inputs": {},
"bytes": 7188678
"bytes": 5797367
},
"dist/human.node.js": {
"imports": [],
@ -1524,12 +1661,51 @@
"node_modules/node-fetch/lib/index.mjs": {
"bytesInOutput": 25323
},
"node_modules/@tensorflow/tfjs/dist/tf.es2017.js": {
"bytesInOutput": 1380779
},
"node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js": {
"bytesInOutput": 462681
},
"node_modules/@tensorflow/tfjs-layers/dist/tf-layers.node.js": {
"bytesInOutput": 362887
},
"node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js": {
"bytesInOutput": 159012
},
"node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js": {
"bytesInOutput": 74518
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/alea.js": {
"bytesInOutput": 1304
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xor128.js": {
"bytesInOutput": 1065
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xorwow.js": {
"bytesInOutput": 1170
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xorshift7.js": {
"bytesInOutput": 1324
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xor4096.js": {
"bytesInOutput": 1521
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/tychei.js": {
"bytesInOutput": 1191
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/seedrandom.js": {
"bytesInOutput": 2593
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/index.js": {
"bytesInOutput": 383
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
"bytesInOutput": 144570
},
"node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js": {
"bytesInOutput": 372092
},
"node_modules/@tensorflow/tfjs/dist/index.js": {
"bytesInOutput": 979
},
"node_modules/@tensorflow/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm-threaded-simd.js": {
"bytesInOutput": 73037
},
@ -1537,7 +1713,7 @@
"bytesInOutput": 27645
},
"src/face/blazeface.js": {
"bytesInOutput": 5451
"bytesInOutput": 5454
},
"src/face/box.js": {
"bytesInOutput": 1673
@ -1552,22 +1728,22 @@
"bytesInOutput": 9799
},
"src/face/facemesh.js": {
"bytesInOutput": 1942
"bytesInOutput": 1948
},
"src/profile.js": {
"bytesInOutput": 897
},
"src/age/age.js": {
"bytesInOutput": 1212
"bytesInOutput": 1215
},
"src/gender/gender.js": {
"bytesInOutput": 2032
"bytesInOutput": 2035
},
"src/emotion/emotion.js": {
"bytesInOutput": 1884
"bytesInOutput": 1887
},
"src/embedding/embedding.js": {
"bytesInOutput": 1407
"bytesInOutput": 1408
},
"src/body/modelBase.js": {
"bytesInOutput": 642
@ -1597,7 +1773,7 @@
"bytesInOutput": 1963
},
"src/body/modelPoseNet.js": {
"bytesInOutput": 1679
"bytesInOutput": 1682
},
"src/body/posenet.js": {
"bytesInOutput": 889
@ -1612,7 +1788,7 @@
"bytesInOutput": 127039
},
"src/hand/handpose.js": {
"bytesInOutput": 1828
"bytesInOutput": 1834
},
"src/gesture.js": {
"bytesInOutput": 2332
@ -1624,10 +1800,16 @@
"bytesInOutput": 4063
},
"src/human.js": {
"bytesInOutput": 10622
"bytesInOutput": 10635
},
"node_modules/@tensorflow/tfjs/dist/version.js": {
"bytesInOutput": 22
},
"src/tf.js": {
"bytesInOutput": 80
"bytesInOutput": 36
},
"node_modules/@tensorflow/tfjs-backend-wasm/dist/index.js": {
"bytesInOutput": 159
},
"node_modules/@tensorflow/tfjs-backend-wasm/dist/register_all_kernels.js": {
"bytesInOutput": 1298
@ -1879,7 +2061,7 @@
"bytesInOutput": 566
},
"node_modules/@tensorflow/tfjs-backend-wasm/dist/backend_wasm.js": {
"bytesInOutput": 6588
"bytesInOutput": 6972
},
"node_modules/@tensorflow/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm-threaded-simd.worker.js": {
"bytesInOutput": 3432
@ -1887,8 +2069,8 @@
"node_modules/@tensorflow/tfjs-backend-wasm/dist/base.js": {
"bytesInOutput": 0
},
"node_modules/@tensorflow/tfjs-backend-wasm/dist/index.js": {
"bytesInOutput": 0
"node_modules/@tensorflow/tfjs-backend-wasm/dist/version.js": {
"bytesInOutput": 23
},
"src/hand/box.js": {
"bytesInOutput": 1979
@ -1903,7 +2085,7 @@
"bytesInOutput": 21
}
},
"bytes": 2308357
"bytes": 2052772
}
}
}

6
package-lock.json generated
View File

@ -821,9 +821,9 @@
}
},
"esbuild": {
"version": "0.8.8",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.8.8.tgz",
"integrity": "sha512-1Wo7L5Y6FpUUalF2APCh9cJi+IZ60jU9IBpTZSXA7jj3HItpAxPTmeIqGsaRW66rjg8SU6rvLnvQpgWqkCkCeA==",
"version": "0.8.9",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.8.9.tgz",
"integrity": "sha512-HAV4mKJqos0L8g6pL7evrw/ZPm478yFNtkuYhqJAeTrIW40XtBxhHrt4Pm2faYeRB8K6nA7dTDgmF+O0e9JCXQ==",
"dev": true
},
"escalade": {

View File

@ -28,7 +28,7 @@
"@vladmandic/pilogger": "^0.2.7",
"chokidar": "^3.4.3",
"dayjs": "^1.9.6",
"esbuild": "^0.8.8",
"esbuild": "^0.8.9",
"eslint": "^7.13.0",
"eslint-config-airbnb-base": "^14.2.1",
"eslint-plugin-import": "^2.22.1",

View File

@ -1,4 +1,4 @@
import { tf, loadGraphModel } from '../tf.js';
import { tf } from '../tf.js';
import * as profile from '../profile.js';
const models = {};
@ -7,7 +7,7 @@ let frame = Number.MAX_SAFE_INTEGER;
async function load(config) {
if (!models.age) {
models.age = await loadGraphModel(config.face.age.modelPath);
models.age = await tf.loadGraphModel(config.face.age.modelPath);
// eslint-disable-next-line no-console
console.log(`Human: load model: ${config.face.age.modelPath.match(/\/(.*)\./)[1]}`);
}

View File

@ -1,4 +1,4 @@
import { loadGraphModel } from '../tf.js';
import { tf } from '../tf.js';
import * as modelMobileNet from './modelMobileNet';
import * as decodeMultiple from './decodeMultiple';
import * as util from './util';
@ -38,7 +38,7 @@ class PoseNet {
exports.PoseNet = PoseNet;
async function load(config) {
const graphModel = await loadGraphModel(config.body.modelPath);
const graphModel = await tf.loadGraphModel(config.body.modelPath);
const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride);
// eslint-disable-next-line no-console
console.log(`Human: load model: ${config.body.modelPath.match(/\/(.*)\./)[1]}`);

View File

@ -1,4 +1,4 @@
import { tf, loadGraphModel } from '../tf.js';
import { tf } from '../tf.js';
import * as profile from '../profile.js';
// based on https://github.com/sirius-ai/MobileFaceNet_TF
@ -8,7 +8,7 @@ const models = {};
async function load(config) {
if (!models.embedding) {
models.embedding = await loadGraphModel(config.face.embedding.modelPath);
models.embedding = await tf.loadGraphModel(config.face.embedding.modelPath);
// eslint-disable-next-line no-console
console.log(`Human: load model: ${config.face.embedding.modelPath.match(/\/(.*)\./)[1]}`);
}

View File

@ -1,4 +1,4 @@
import { tf, loadGraphModel } from '../tf.js';
import { tf } from '../tf.js';
import * as profile from '../profile.js';
const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surpise', 'neutral'];
@ -12,7 +12,7 @@ const scale = 1; // score multiplication factor
async function load(config) {
if (!models.emotion) {
models.emotion = await loadGraphModel(config.face.emotion.modelPath);
models.emotion = await tf.loadGraphModel(config.face.emotion.modelPath);
// eslint-disable-next-line no-console
console.log(`Human: load model: ${config.face.emotion.modelPath.match(/\/(.*)\./)[1]}`);
}

View File

@ -1,4 +1,4 @@
import { tf, loadGraphModel } from '../tf.js';
import { tf } from '../tf.js';
const NUM_LANDMARKS = 6;
@ -164,7 +164,7 @@ class BlazeFaceModel {
}
async function load(config) {
const blazeface = await loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') });
const blazeface = await tf.loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') });
const model = new BlazeFaceModel(blazeface, config);
// eslint-disable-next-line no-console
console.log(`Human: load model: ${config.detector.modelPath.match(/\/(.*)\./)[1]}`);

View File

@ -1,4 +1,4 @@
import { tf, loadGraphModel } from '../tf.js';
import { tf } from '../tf.js';
import * as blazeface from './blazeface.js';
import * as pipe from './facepipeline.js';
import * as coords from './coords.js';
@ -42,8 +42,8 @@ class MediaPipeFaceMesh {
async function load(config) {
const models = await Promise.all([
blazeface.load(config),
loadGraphModel(config.mesh.modelPath, { fromTFHub: config.mesh.modelPath.includes('tfhub.dev') }),
loadGraphModel(config.iris.modelPath, { fromTFHub: config.iris.modelPath.includes('tfhub.dev') }),
tf.loadGraphModel(config.mesh.modelPath, { fromTFHub: config.mesh.modelPath.includes('tfhub.dev') }),
tf.loadGraphModel(config.iris.modelPath, { fromTFHub: config.iris.modelPath.includes('tfhub.dev') }),
]);
const faceMesh = new MediaPipeFaceMesh(models[0], models[1], models[2], config);
// eslint-disable-next-line no-console

View File

@ -1,4 +1,4 @@
import { tf, loadGraphModel } from '../tf.js';
import { tf } from '../tf.js';
import * as profile from '../profile.js';
const models = {};
@ -11,7 +11,7 @@ const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when
async function load(config) {
if (!models.gender) {
models.gender = await loadGraphModel(config.face.gender.modelPath);
models.gender = await tf.loadGraphModel(config.face.gender.modelPath);
alternative = models.gender.inputs[0].shape[3] === 1;
// eslint-disable-next-line no-console
console.log(`Human: load model: ${config.face.gender.modelPath.match(/\/(.*)\./)[1]}`);

View File

@ -16,7 +16,7 @@
*/
// https://storage.googleapis.com/tfjs-models/demos/handpose/index.html
import { loadGraphModel } from '../tf.js';
import { tf } from '../tf.js';
import * as handdetector from './handdetector';
import * as pipeline from './handpipeline';
import * as anchors from './anchors';
@ -69,8 +69,8 @@ exports.HandPose = HandPose;
async function load(config) {
const [handDetectorModel, handPoseModel] = await Promise.all([
loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') }),
loadGraphModel(config.skeleton.modelPath, { fromTFHub: config.skeleton.modelPath.includes('tfhub.dev') }),
tf.loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') }),
tf.loadGraphModel(config.skeleton.modelPath, { fromTFHub: config.skeleton.modelPath.includes('tfhub.dev') }),
]);
const detector = new handdetector.HandDetector(handDetectorModel, config.inputSize, anchors.anchors);
const pipe = new pipeline.HandPipeline(detector, handPoseModel, config.inputSize);

View File

@ -1,4 +1,4 @@
import { tf, setWasmPaths } from './tf.js';
import { tf, wasm } from './tf.js';
import * as facemesh from './face/facemesh.js';
import * as age from './age/age.js';
import * as gender from './gender/gender.js';
@ -172,7 +172,7 @@ class Human {
if (this.config.backend === 'wasm') {
this.log('settings wasm path:', this.config.wasmPath);
setWasmPaths(this.config.wasmPath);
wasm.setWasmPaths(this.config.wasmPath);
const simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');
if (!simd) this.log('warning: wasm simd support is not enabled');
}

View File

@ -1,29 +1,4 @@
// from esm bundles: bundle 3.3M
import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js';
import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';
// wrapper to load tfjs in a single place so version can be changed quickly
const loadGraphModel = tf.loadGraphModel;
// const setWasmPaths = tf.setWasmPaths;
export { tf, loadGraphModel, setWasmPaths };
// modular: bundle 4.2M
/*
import * as tf from '@tensorflow/tfjs-core/dist/tf-core.es2017.js';
import { loadGraphModel } from '@tensorflow/tfjs-converter/dist/tf-converter.es2017.js';
import * as tfCPU from '@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.es2017.js';
import * as tfWebGL from '@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.es2017.js';
import { setWasmPaths, version_wasm } from '@tensorflow/tfjs-backend-wasm/dist/index.js';
const version = { core: tf.version, cpu: tfCPU.version_cpu, webgl: tfWebGL.version_webgl, wasm: version_wasm };
export { tf, setWasmPaths, loadGraphModel, version };
*/
// custom: bundle 3.4M
/*
import * as tf from '../../../dev-clone/tfjs/tfjs/dist/index.js';
import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';
const loadGraphModel = tf.loadGraphModel;
export { tf, setWasmPaths, loadGraphModel };
*/
export * as tf from '@tensorflow/tfjs/dist/index.js';
export * as wasm from '@tensorflow/tfjs-backend-wasm/dist/index.js';