Compare commits
149 Commits
Author | SHA1 | Date |
---|---|---|
![]() |
189226d63a | |
![]() |
f587b44f66 | |
![]() |
e3f11b8533 | |
![]() |
171d17cadf | |
![]() |
e4cdf624c9 | |
![]() |
c633f9fbe4 | |
![]() |
ffc3c40362 | |
![]() |
a8193f9077 | |
![]() |
155f07dccd | |
![]() |
2f0469fe6e | |
![]() |
697b265337 | |
![]() |
4719b81587 | |
![]() |
fc9a39ea13 | |
![]() |
438897c5a2 | |
![]() |
f4d4780267 | |
![]() |
a5c767fdff | |
![]() |
1fa29b0fd3 | |
![]() |
472f2e4480 | |
![]() |
4433ce44bc | |
![]() |
4ca829f941 | |
![]() |
038349968c | |
![]() |
ae96c7b230 | |
![]() |
f9f036ba01 | |
![]() |
0736a99250 | |
![]() |
3ea729badb | |
![]() |
d36ed6d266 | |
![]() |
4061d4d62f | |
![]() |
b034c46f80 | |
![]() |
aefd776a9e | |
![]() |
20eb54beb4 | |
![]() |
e8301c5277 | |
![]() |
fba823ba50 | |
![]() |
a1cb6de1e8 | |
![]() |
fb3836019f | |
![]() |
15ae496f40 | |
![]() |
0009d1bc34 | |
![]() |
adc4b3a11d | |
![]() |
7e5a1289ff | |
![]() |
cd2c553737 | |
![]() |
a433fc0681 | |
![]() |
f9902b0459 | |
![]() |
bd5ab6bb0f | |
![]() |
96fed4f123 | |
![]() |
0cbfd9b01b | |
![]() |
dea225bbeb | |
![]() |
602e86cbec | |
![]() |
00bf49b24f | |
![]() |
fa33c1281c | |
![]() |
7f613367a3 | |
![]() |
4d65f459f9 | |
![]() |
d28e5d2142 | |
![]() |
6aeb292453 | |
![]() |
289faf17f2 | |
![]() |
7a6f7d96b7 | |
![]() |
870eebedfa | |
![]() |
1ed702f713 | |
![]() |
b2a988e436 | |
![]() |
5c38676a83 | |
![]() |
bac0ef10cf | |
![]() |
8baef0ef68 | |
![]() |
c5dbb9d4e9 | |
![]() |
a8021dc2a3 | |
![]() |
f946780bab | |
![]() |
8e7061a9aa | |
![]() |
cd904ca5dd | |
![]() |
496779fee2 | |
![]() |
4ba4a99ee1 | |
![]() |
31170e750b | |
![]() |
5f58cd376d | |
![]() |
07eb00d7d6 | |
![]() |
a1f7a0841f | |
![]() |
49a594a59b | |
![]() |
3b3ab219dc | |
![]() |
2fce7338dc | |
![]() |
6cafeafba1 | |
![]() |
d0f1349a23 | |
![]() |
cdb0e485f8 | |
![]() |
5bcc4d2a73 | |
![]() |
92008ed6f4 | |
![]() |
c1b38f99fe | |
![]() |
0c5251c219 | |
![]() |
fcf61e5c30 | |
![]() |
8c7e21b1c9 | |
![]() |
2841969df8 | |
![]() |
39b137ed63 | |
![]() |
c53becfc67 | |
![]() |
fd427cce39 | |
![]() |
43805b50c6 | |
![]() |
fc18d89ab6 | |
![]() |
0de113080c | |
![]() |
471ddb7549 | |
![]() |
70991235df | |
![]() |
c07be32e26 | |
![]() |
936ecba7ec | |
![]() |
63476fcbc0 | |
![]() |
62da12758f | |
![]() |
bd4d5935fe | |
![]() |
118fbaba4d | |
![]() |
e70d9bb18b | |
![]() |
f1a2ef34a5 | |
![]() |
e7fd0efd27 | |
![]() |
eb5501c672 | |
![]() |
8b304fa3d4 | |
![]() |
1824a62efb | |
![]() |
bd2317d42e | |
![]() |
1def723c7b | |
![]() |
d78dd3aae1 | |
![]() |
461e074993 | |
![]() |
1d30a9f816 | |
![]() |
fcbfc8589a | |
![]() |
c7b2c65c97 | |
![]() |
1b4580dd6e | |
![]() |
fdddee7101 | |
![]() |
aee959f464 | |
![]() |
f70e5615b4 | |
![]() |
4ba43e08ae | |
![]() |
c3049e7c29 | |
![]() |
e2609a0ef2 | |
![]() |
d13586f549 | |
![]() |
519e346f02 | |
![]() |
efb307d230 | |
![]() |
47f2b53e92 | |
![]() |
9b810d8028 | |
![]() |
f48cbda416 | |
![]() |
ac172b8be5 | |
![]() |
2c8c8c2c1c | |
![]() |
9fb3029211 | |
![]() |
225192d18d | |
![]() |
8dab959446 | |
![]() |
42d9d677de | |
![]() |
d5b366629b | |
![]() |
1455c35c81 | |
![]() |
953ef705ab | |
![]() |
00803107ce | |
![]() |
2ac6baa02b | |
![]() |
7ef748390c | |
![]() |
b4ba10898f | |
![]() |
df47b3e2a9 | |
![]() |
76daa38bce | |
![]() |
e13a6d684b | |
![]() |
da426d5cfd | |
![]() |
1de3551a0b | |
![]() |
98ea06fb0e | |
![]() |
bf84748777 | |
![]() |
25735fcb34 | |
![]() |
7b8b30bfc9 | |
![]() |
107297015e | |
![]() |
b9c78b21b0 | |
![]() |
1c577b6ede |
|
@ -0,0 +1,148 @@
|
|||
{
|
||||
"log": {
|
||||
"enabled": false,
|
||||
"debug": false,
|
||||
"console": true,
|
||||
"output": "build.log"
|
||||
},
|
||||
"profiles": {
|
||||
"production": ["compile", "typings", "typedoc", "lint", "changelog"],
|
||||
"development": ["serve", "watch", "compile"]
|
||||
},
|
||||
"clean": {
|
||||
"locations": ["dist/*", "typedoc/*", "types/lib/src"]
|
||||
},
|
||||
"lint": {
|
||||
"locations": [ "src/" ],
|
||||
"rules": { }
|
||||
},
|
||||
"changelog": {
|
||||
"log": "CHANGELOG.md"
|
||||
},
|
||||
"serve": {
|
||||
"sslKey": "cert/https.key",
|
||||
"sslCrt": "cert/https.crt",
|
||||
"httpPort": 8000,
|
||||
"httpsPort": 8001,
|
||||
"documentRoot": ".",
|
||||
"defaultFolder": "demo",
|
||||
"defaultFile": "index.html"
|
||||
},
|
||||
"build": {
|
||||
"global": {
|
||||
"target": "es2018",
|
||||
"treeShaking": true,
|
||||
"ignoreAnnotations": true,
|
||||
"sourcemap": false,
|
||||
"banner": { "js": "/*\n Face-API\n homepage: <https://github.com/vladmandic/face-api>\n author: <https://github.com/vladmandic>'\n*/\n" }
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"name": "tfjs/browser/tf-version",
|
||||
"platform": "browser",
|
||||
"format": "esm",
|
||||
"input": "src/tfjs/tf-version.ts",
|
||||
"output": "dist/tfjs.version.js"
|
||||
},
|
||||
{
|
||||
"name": "tfjs/node/cpu",
|
||||
"platform": "node",
|
||||
"format": "cjs",
|
||||
"input": "src/tfjs/tf-node.ts",
|
||||
"output": "dist/tfjs.esm.js",
|
||||
"external": ["@tensorflow"]
|
||||
},
|
||||
{
|
||||
"name": "faceapi/node/cpu",
|
||||
"platform": "node",
|
||||
"format": "cjs",
|
||||
"input": "src/index.ts",
|
||||
"output": "dist/face-api.node.js",
|
||||
"external": ["@tensorflow"]
|
||||
},
|
||||
{
|
||||
"name": "tfjs/node/gpu",
|
||||
"platform": "node",
|
||||
"format": "cjs",
|
||||
"input": "src/tfjs/tf-node-gpu.ts",
|
||||
"output": "dist/tfjs.esm.js",
|
||||
"external": ["@tensorflow"]
|
||||
},
|
||||
{
|
||||
"name": "faceapi/node/gpu",
|
||||
"platform": "node",
|
||||
"format": "cjs",
|
||||
"input": "src/index.ts",
|
||||
"output": "dist/face-api.node-gpu.js",
|
||||
"external": ["@tensorflow"]
|
||||
},
|
||||
{
|
||||
"name": "tfjs/node/wasm",
|
||||
"platform": "node",
|
||||
"format": "cjs",
|
||||
"input": "src/tfjs/tf-node-wasm.ts",
|
||||
"output": "dist/tfjs.esm.js",
|
||||
"external": ["@tensorflow"]
|
||||
},
|
||||
{
|
||||
"name": "faceapi/node/wasm",
|
||||
"platform": "node",
|
||||
"format": "cjs",
|
||||
"input": "src/index.ts",
|
||||
"output": "dist/face-api.node-wasm.js",
|
||||
"external": ["@tensorflow"]
|
||||
},
|
||||
{
|
||||
"name": "tfjs/browser/esm/nobundle",
|
||||
"platform": "browser",
|
||||
"format": "esm",
|
||||
"input": "src/tfjs/tf-browser.ts",
|
||||
"output": "dist/tfjs.esm.js",
|
||||
"external": ["@tensorflow"]
|
||||
},
|
||||
{
|
||||
"name": "faceapi/browser/esm/nobundle",
|
||||
"platform": "browser",
|
||||
"format": "esm",
|
||||
"input": "src/index.ts",
|
||||
"output": "dist/face-api.esm-nobundle.js",
|
||||
"external": ["@tensorflow"]
|
||||
},
|
||||
{
|
||||
"name": "tfjs/browser/esm/bundle",
|
||||
"platform": "browser",
|
||||
"format": "esm",
|
||||
"input": "src/tfjs/tf-browser.ts",
|
||||
"output": "dist/tfjs.esm.js"
|
||||
},
|
||||
{
|
||||
"name": "faceapi/browser/iife/bundle",
|
||||
"platform": "browser",
|
||||
"format": "iife",
|
||||
"globalName": "faceapi",
|
||||
"minify": true,
|
||||
"input": "src/index.ts",
|
||||
"output": "dist/face-api.js",
|
||||
"external": ["@tensorflow"]
|
||||
},
|
||||
{
|
||||
"name": "faceapi/browser/esm/bundle",
|
||||
"platform": "browser",
|
||||
"format": "esm",
|
||||
"sourcemap": true,
|
||||
"input": "src/index.ts",
|
||||
"output": "dist/face-api.esm.js",
|
||||
"typings": "types/lib",
|
||||
"typedoc": "typedoc",
|
||||
"external": ["@tensorflow"]
|
||||
}
|
||||
]
|
||||
},
|
||||
"watch": {
|
||||
"enabled": true,
|
||||
"locations": [ "src/**" ]
|
||||
},
|
||||
"typescript": {
|
||||
"allowJs": false
|
||||
}
|
||||
}
|
|
@ -3,50 +3,74 @@
|
|||
"env": {
|
||||
"browser": true,
|
||||
"commonjs": true,
|
||||
"es6": true,
|
||||
"node": true,
|
||||
"es2020": true
|
||||
},
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": { "ecmaVersion": 2020 },
|
||||
"plugins": ["@typescript-eslint"],
|
||||
"parserOptions": { "ecmaVersion": "latest" },
|
||||
"plugins": [
|
||||
"@typescript-eslint"
|
||||
],
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:import/errors",
|
||||
"plugin:import/warnings",
|
||||
"plugin:import/typescript",
|
||||
"plugin:node/recommended",
|
||||
"plugin:promise/recommended",
|
||||
"plugin:json/recommended-with-comments",
|
||||
"plugin:@typescript-eslint/eslint-recommended",
|
||||
"plugin:@typescript-eslint/recommended",
|
||||
"airbnb-base"
|
||||
],
|
||||
"ignorePatterns": [ "node_modules", "types" ],
|
||||
"settings": {
|
||||
"import/resolver": {
|
||||
"node": {
|
||||
"extensions": [".js", ".ts"]
|
||||
}
|
||||
}
|
||||
},
|
||||
"rules": {
|
||||
"max-len": [1, 275, 3],
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
"@typescript-eslint/ban-types": "off",
|
||||
"@typescript-eslint/ban-ts-comment": "off",
|
||||
"@typescript-eslint/explicit-module-boundary-types": "off",
|
||||
"@typescript-eslint/no-var-requires": "off",
|
||||
"@typescript-eslint/no-empty-object-type": "off",
|
||||
"@typescript-eslint/no-require-imports": "off",
|
||||
"camelcase": "off",
|
||||
"class-methods-use-this": "off",
|
||||
"default-param-last": "off",
|
||||
"dot-notation": "off",
|
||||
"func-names": "off",
|
||||
"guard-for-in": "off",
|
||||
"import/extensions": "off",
|
||||
"import/no-cycle": "off",
|
||||
"import/no-extraneous-dependencies": "off",
|
||||
"import/no-named-as-default": "off",
|
||||
"import/no-unresolved": "off",
|
||||
"import/prefer-default-export": "off",
|
||||
"lines-between-class-members": "off",
|
||||
"max-len": [1, 275, 3],
|
||||
"newline-per-chained-call": "off",
|
||||
"no-async-promise-executor": "off",
|
||||
"no-await-in-loop": "off",
|
||||
"no-bitwise": "off",
|
||||
"no-case-declarations":"off",
|
||||
"no-continue": "off",
|
||||
"no-loop-func": "off",
|
||||
"no-mixed-operators": "off",
|
||||
"no-param-reassign": "off",
|
||||
"no-param-reassign":"off",
|
||||
"no-plusplus": "off",
|
||||
"no-regex-spaces": "off",
|
||||
"no-restricted-globals": "off",
|
||||
"no-restricted-syntax": "off",
|
||||
"no-return-assign": "off",
|
||||
"no-underscore-dangle": "off",
|
||||
"node/no-missing-import": "off",
|
||||
"no-promise-executor-return": "off",
|
||||
"node/no-missing-import": ["error", { "tryExtensions": [".js", ".json", ".ts"] }],
|
||||
"node/no-unpublished-import": "off",
|
||||
"node/no-unpublished-require": "off",
|
||||
"node/no-unsupported-features/es-syntax": "off",
|
||||
"no-lonely-if": "off",
|
||||
"node/shebang": "off",
|
||||
"object-curly-newline": "off",
|
||||
"prefer-destructuring": "off",
|
||||
"radix": "off",
|
||||
"object-curly-newline": "off"
|
||||
"prefer-template":"off",
|
||||
"promise/always-return": "off",
|
||||
"promise/catch-or-return": "off",
|
||||
"promise/no-nesting": "off",
|
||||
"radix": "off"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
# These are supported funding model platforms
|
||||
|
||||
github: [vladmandic]
|
||||
patreon: # Replace with a single Patreon username
|
||||
open_collective: # Replace with a single Open Collective username
|
||||
ko_fi: # Replace with a single Ko-fi username
|
||||
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||
liberapay: # Replace with a single Liberapay username
|
||||
issuehunt: # Replace with a single IssueHunt username
|
||||
otechie: # Replace with a single Otechie username
|
||||
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
|
||||
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
|
|
@ -1,3 +1,2 @@
|
|||
node_modules
|
||||
pnpm-lock.yaml
|
||||
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
node_modules
|
||||
pnpm-lock.yaml
|
||||
|
||||
typedoc
|
||||
test
|
||||
types/lib
|
||||
|
|
6
.npmrc
6
.npmrc
|
@ -1 +1,5 @@
|
|||
force = true
|
||||
force=true
|
||||
production=true
|
||||
legacy-peer-deps=true
|
||||
strict-peer-dependencies=false
|
||||
node-options='--no-deprecation'
|
||||
|
|
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"typescript.tsdk": "node_modules/typescript/lib"
|
||||
}
|
301
CHANGELOG.md
301
CHANGELOG.md
|
@ -1,19 +1,194 @@
|
|||
# @vladmandic/face-api
|
||||
|
||||
Version: **1.2.2**
|
||||
Description: **FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS**
|
||||
|
||||
Author: **Vladimir Mandic <mandic00@live.com>**
|
||||
License: **MIT** </LICENSE>
|
||||
Repository: **<git+https://github.com/vladmandic/face-api.git>**
|
||||
|
||||
Version: **1.7.15**
|
||||
Description: **FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS**
|
||||
|
||||
Author: **Vladimir Mandic <mandic00@live.com>**
|
||||
License: **MIT**
|
||||
Repository: **<https://github.com/vladmandic/face-api>**
|
||||
|
||||
## Changelog
|
||||
|
||||
### **1.7.15** 2025/02/05 mandic00@live.com
|
||||
|
||||
|
||||
### **origin/master** 2024/09/10 mandic00@live.com
|
||||
|
||||
|
||||
### **1.7.14** 2024/09/10 mandic00@live.com
|
||||
|
||||
- rebuild
|
||||
- merge pull request #188 from rebser/master
|
||||
- fixing leaking eventhandlers when using htmlcanvaselement
|
||||
- rebuild types
|
||||
- rebuild
|
||||
|
||||
### **1.7.13** 2024/01/17 mandic00@live.com
|
||||
|
||||
- merge pull request #186 from khwalkowicz/master
|
||||
- feat: enable noimplicitany
|
||||
|
||||
### **release: 1.7.12** 2023/06/12 mandic00@live.com
|
||||
|
||||
|
||||
### **1.7.12** 2023/06/12 mandic00@live.com
|
||||
|
||||
|
||||
### **1.7.11** 2023/05/08 mandic00@live.com
|
||||
|
||||
|
||||
### **1.7.10** 2023/03/21 mandic00@live.com
|
||||
|
||||
- change typedefs
|
||||
|
||||
### **1.7.9** 2023/01/29 mandic00@live.com
|
||||
|
||||
|
||||
### **1.7.8** 2023/01/06 mandic00@live.com
|
||||
|
||||
|
||||
### **1.7.7** 2022/12/01 mandic00@live.com
|
||||
|
||||
|
||||
### **1.7.6** 2022/10/18 mandic00@live.com
|
||||
|
||||
- fix face angles (yaw, pitch, & roll) accuracy (#130)
|
||||
|
||||
### **1.7.5** 2022/10/09 mandic00@live.com
|
||||
|
||||
- create funding.yml
|
||||
- add node-wasm demo
|
||||
|
||||
### **1.7.4** 2022/09/25 mandic00@live.com
|
||||
|
||||
- improve face compare performance
|
||||
|
||||
### **1.7.3** 2022/08/24 mandic00@live.com
|
||||
|
||||
- refresh release
|
||||
|
||||
### **1.7.2** 2022/08/23 mandic00@live.com
|
||||
|
||||
- document and remove optional dependencies
|
||||
|
||||
### **release: 1.7.1** 2022/07/25 mandic00@live.com
|
||||
|
||||
|
||||
### **1.7.1** 2022/07/25 mandic00@live.com
|
||||
|
||||
- refactor dependencies
|
||||
- full rebuild
|
||||
|
||||
### **1.6.11** 2022/05/24 mandic00@live.com
|
||||
|
||||
|
||||
### **1.6.10** 2022/05/24 mandic00@live.com
|
||||
|
||||
|
||||
### **1.6.9** 2022/05/18 mandic00@live.com
|
||||
|
||||
|
||||
### **1.6.8** 2022/05/09 mandic00@live.com
|
||||
|
||||
- exclude impossible detected face boxes
|
||||
|
||||
### **1.6.7** 2022/04/01 mandic00@live.com
|
||||
|
||||
- fixed typo error (#97)
|
||||
|
||||
### **1.6.6** 2022/03/04 mandic00@live.com
|
||||
|
||||
|
||||
### **1.6.5** 2022/02/07 mandic00@live.com
|
||||
|
||||
|
||||
### **1.6.4** 2022/01/14 mandic00@live.com
|
||||
|
||||
- add node with wasm build target
|
||||
|
||||
### **1.6.3** 2022/01/06 mandic00@live.com
|
||||
|
||||
|
||||
### **1.6.2** 2022/01/01 mandic00@live.com
|
||||
|
||||
|
||||
### **1.6.1** 2021/12/09 mandic00@live.com
|
||||
|
||||
- rebuild
|
||||
- release preview
|
||||
- switch to custom tfjs and new typedefs
|
||||
- rebuild
|
||||
|
||||
### **1.5.8** 2021/11/30 mandic00@live.com
|
||||
|
||||
|
||||
### **1.5.7** 2021/10/28 mandic00@live.com
|
||||
|
||||
|
||||
### **1.5.6** 2021/10/22 mandic00@live.com
|
||||
|
||||
|
||||
### **release: 1.5.5** 2021/10/19 mandic00@live.com
|
||||
|
||||
|
||||
### **1.5.5** 2021/10/19 mandic00@live.com
|
||||
|
||||
- allow backend change in demo via url params
|
||||
- add node-match demo
|
||||
- fix face matcher
|
||||
|
||||
### **1.5.4** 2021/09/29 mandic00@live.com
|
||||
|
||||
|
||||
### **1.5.3** 2021/09/16 mandic00@live.com
|
||||
|
||||
- simplify tfjs imports
|
||||
- reduce bundle size
|
||||
- enable webgl uniforms
|
||||
|
||||
### **1.5.2** 2021/09/10 mandic00@live.com
|
||||
|
||||
- redesign build platform
|
||||
|
||||
### **1.5.1** 2021/09/08 mandic00@live.com
|
||||
|
||||
|
||||
### **1.4.2** 2021/08/31 mandic00@live.com
|
||||
|
||||
|
||||
### **release: 1.4.1** 2021/07/29 mandic00@live.com
|
||||
|
||||
|
||||
### **1.4.1** 2021/07/29 mandic00@live.com
|
||||
|
||||
|
||||
### **release: 1.3.1** 2021/06/18 mandic00@live.com
|
||||
|
||||
|
||||
### **1.3.1** 2021/06/08 mandic00@live.com
|
||||
|
||||
- fix face expression detection (#56)
|
||||
- add buffertovideo
|
||||
- fix git conflicts
|
||||
- fix tsc error (#55)
|
||||
- force typescript 4.2 due to typedoc incompatibility with ts 4.3
|
||||
|
||||
### **1.2.5** 2021/05/27 mandic00@live.com
|
||||
|
||||
- add buffertovideo and fetchvideo (#54)
|
||||
|
||||
### **1.2.4** 2021/05/18 mandic00@live.com
|
||||
|
||||
|
||||
### **1.2.3** 2021/05/04 mandic00@live.com
|
||||
|
||||
|
||||
### **update for tfjs 3.6.0** 2021/04/30 mandic00@live.com
|
||||
|
||||
|
||||
### **1.2.2** 2021/04/30 mandic00@live.com
|
||||
|
||||
|
||||
### **origin/master** 2021/04/26 mandic00@live.com
|
||||
|
||||
- add node-wasm demo
|
||||
- accept uri as input to demo node and node-canvas
|
||||
- major version full rebuild
|
||||
|
||||
|
@ -82,111 +257,61 @@ Repository: **<git+https://github.com/vladmandic/face-api.git>**
|
|||
|
||||
- add badges
|
||||
- optimize for npm
|
||||
|
||||
### **0.30.6** 2021/03/08 mandic00@live.com
|
||||
|
||||
- 0.30.6
|
||||
- added typings for face angle
|
||||
- disable landmark printing
|
||||
|
||||
### **0.30.5** 2021/03/07 mandic00@live.com
|
||||
|
||||
- 0.30.5
|
||||
- enabled live demo on gitpages
|
||||
|
||||
### **0.30.4** 2021/03/07 mandic00@live.com
|
||||
|
||||
- 0.30.4
|
||||
- added face angle calculations
|
||||
- added documentation
|
||||
- package update
|
||||
|
||||
### **0.30.3** 2021/03/04 mandic00@live.com
|
||||
|
||||
|
||||
### **0.30.2** 2021/02/26 mandic00@live.com
|
||||
|
||||
|
||||
### **0.30.1** 2021/02/25 mandic00@live.com
|
||||
|
||||
|
||||
### **0.13.3** 2021/02/21 mandic00@live.com
|
||||
|
||||
- 0.30.3
|
||||
- 0.30.2
|
||||
- 0.30.1
|
||||
- 0.13.3
|
||||
- added note-cpu target
|
||||
- merge pull request #39 from xemle/feature/node-cpu
|
||||
- add node-cpu build for non supported systems of libtensorflow
|
||||
|
||||
### **0.13.2** 2021/02/20 mandic00@live.com
|
||||
|
||||
|
||||
### **0.13.1** 2021/02/20 mandic00@live.com
|
||||
|
||||
|
||||
### **0.12.10** 2021/02/20 mandic00@live.com
|
||||
|
||||
- 0.13.2
|
||||
- 0.13.1
|
||||
- 0.12.10
|
||||
- exception handling
|
||||
- 0.12.9
|
||||
- exception handling
|
||||
- 0.12.8
|
||||
- exception handling
|
||||
|
||||
### **0.12.9** 2021/02/20 mandic00@live.com
|
||||
|
||||
|
||||
### **0.12.8** 2021/02/20 mandic00@live.com
|
||||
|
||||
|
||||
### **0.12.7** 2021/02/17 mandic00@live.com
|
||||
|
||||
- 0.12.7
|
||||
|
||||
### **0.12.6** 2021/02/13 mandic00@live.com
|
||||
|
||||
|
||||
### **0.12.5** 2021/02/12 mandic00@live.com
|
||||
|
||||
|
||||
### **0.12.4** 2021/02/06 mandic00@live.com
|
||||
|
||||
|
||||
### **0.12.3** 2021/02/06 mandic00@live.com
|
||||
|
||||
|
||||
### **0.12.2** 2021/02/02 mandic00@live.com
|
||||
|
||||
- 0.12.6
|
||||
- 0.12.5
|
||||
- 0.12.4
|
||||
- 0.12.3
|
||||
- 0.12.2
|
||||
|
||||
### **update for tfjs 3.0.0** 2021/01/29 mandic00@live.com
|
||||
|
||||
|
||||
### **0.12.1** 2021/01/29 mandic00@live.com
|
||||
|
||||
- 0.12.1
|
||||
- rebuild
|
||||
|
||||
### **0.11.6** 2021/01/24 mandic00@live.com
|
||||
|
||||
- 0.11.6
|
||||
- add check for null face descriptor
|
||||
- merge pull request #34 from patrickhulce/patch-1
|
||||
- fix: return empty descriptor for zero-sized faces
|
||||
|
||||
### **0.11.5** 2021/01/22 mandic00@live.com
|
||||
|
||||
|
||||
### **0.11.4** 2021/01/22 mandic00@live.com
|
||||
|
||||
|
||||
### **0.11.3** 2021/01/20 mandic00@live.com
|
||||
|
||||
- 0.11.5
|
||||
- 0.11.4
|
||||
- 0.11.3
|
||||
- fix typo
|
||||
- enable full minification
|
||||
|
||||
### **0.11.2** 2021/01/12 mandic00@live.com
|
||||
|
||||
- 0.11.2
|
||||
- full rebuild
|
||||
|
||||
### **0.11.1** 2021/01/10 mandic00@live.com
|
||||
|
||||
- 0.11.1
|
||||
- added live webcam demo
|
||||
|
||||
### **0.10.2** 2021/01/03 mandic00@live.com
|
||||
|
||||
- 0.10.2
|
||||
- ts linting
|
||||
- version bump
|
||||
|
||||
### **0.10.1** 2020/12/23 mandic00@live.com
|
||||
|
||||
- 0.10.1
|
||||
- full re-lint and typings generation
|
||||
- rebuild
|
||||
|
||||
|
|
217
README.md
217
README.md
|
@ -53,18 +53,18 @@ Example can be accessed directly using Git pages using URL:
|
|||
|
||||
### NodeJS
|
||||
|
||||
Three NodeJS examples are:
|
||||
NodeJS examples are:
|
||||
|
||||
- `/demo/node-simple.js`:
|
||||
Simplest possible NodeJS demo for FaceAPI in under 30 lines of JavaScript code
|
||||
- `/demo/node.js`:
|
||||
Regular usage of `FaceAPI` from `NodeJS`
|
||||
Using `TFJS` native methods to load images without external dependencies
|
||||
- `/demo/node-canvas.js`:
|
||||
Regular usage of `FaceAPI` from `NodeJS`
|
||||
Using `TFJS` native methods to load images without external dependencies
|
||||
- `/demo/node-canvas.js` and `/demo/node-image.js`:
|
||||
Using external `canvas` module to load images
|
||||
Which also allows for image drawing and saving inside `NodeJS` environment
|
||||
- `/demo/node-wasm.js`:
|
||||
Same as `node-canvas`, but using `WASM` backend in `NodeJS` environment
|
||||
Because why not :)
|
||||
Which also allows for image drawing and saving inside `NodeJS` environment
|
||||
- `/demo/node-match.js`:
|
||||
Simple demo that compares face similarity from a given image
|
||||
to a second image or list of images in a folder
|
||||
- `/demo/node-multiprocess.js`:
|
||||
Multiprocessing showcase that uses pool of worker processes
|
||||
(`node-multiprocess-worker.js`)
|
||||
|
@ -104,8 +104,11 @@ Three NodeJS examples are:
|
|||
2021-03-14 08:42:09 STATE: Main: worker exit: 1888019 0
|
||||
```
|
||||
|
||||
Note that `@tensorflow/tfjs-node` or `@tensorflow/tfjs-node-gpu`
|
||||
must be installed before using NodeJS example
|
||||
### NodeJS Notes
|
||||
- Supported NodeJS versions are **14** up to **22**
|
||||
NodeJS version **23** and higher are not supported due to incompatibility with TensorFlow/JS
|
||||
- `@tensorflow/tfjs-node` or `@tensorflow/tfjs-node-gpu`
|
||||
must be installed before using any **NodeJS** examples
|
||||
|
||||
<br><hr><br>
|
||||
|
||||
|
@ -133,8 +136,6 @@ Simply include latest version of `FaceAPI` directly from a CDN in your HTML:
|
|||
*without* TFJS pre-bundled
|
||||
- `dist/face-api.node-gpu.js`: CommonJS format for server-side NodeJS execution
|
||||
*without* TFJS pre-bundled and optimized for CUDA GPU acceleration
|
||||
- `dist/face-api.node-cpu.js`: CommonJS format for server-side NodeJS execution
|
||||
*without* TFJS pre-bundled and using JS engine for platforms where tensorflow binary library version is not available
|
||||
|
||||
Defaults are:
|
||||
|
||||
|
@ -151,7 +152,7 @@ Bundled `TFJS` can be used directly via export: `faceapi.tf`
|
|||
Reason for additional `nobundle` version is if you want to
|
||||
include a specific version of TFJS and not rely on pre-packaged one
|
||||
|
||||
`FaceAPI` is compatible with TFJS 2.0+
|
||||
`FaceAPI` is compatible with TFJS 2.0+ and TFJS 3.0+
|
||||
|
||||
All versions include `sourcemap`
|
||||
|
||||
|
@ -260,7 +261,7 @@ If you want to GPU Accelerated execution in NodeJS, you must have CUDA libraries
|
|||
Then install appropriate version of `FaceAPI`:
|
||||
|
||||
```shell
|
||||
npm install @tensorflow/tfjs-node
|
||||
npm install @tensorflow/tfjs-node-gpu
|
||||
npm install @vladmandic/face-api
|
||||
```
|
||||
|
||||
|
@ -271,18 +272,24 @@ And then use with:
|
|||
const faceapi = require('@vladmandic/face-api/dist/face-api.node-gpu.js'); // this loads face-api version with correct bindings for tfjs-node-gpu
|
||||
```
|
||||
|
||||
If you want to use `FaceAPI` in a NodeJS on platforms where NodeJS binary libraries are not supported, you can use JavaScript CPU backend.
|
||||
If you want to use `FaceAPI` in a NodeJS on platforms where **tensorflow** binary libraries are not supported, you can use NodeJS **WASM** backend.
|
||||
|
||||
```shell
|
||||
npm install @tensorflow/tfjs
|
||||
npm install @tensorflow/tfjs-backend-wasm
|
||||
npm install @vladmandic/face-api
|
||||
```
|
||||
|
||||
And then use with:
|
||||
|
||||
```js
|
||||
const tf = require('@tensorflow/tfjs')
|
||||
const faceapi = require('@vladmandic/face-api/dist/face-api.node-cpu.js');
|
||||
const tf = require('@tensorflow/tfjs');
|
||||
const wasm = require('@tensorflow/tfjs-backend-wasm');
|
||||
const faceapi = require('@vladmandic/face-api/dist/face-api.node-wasm.js'); // use this when using face-api in dev mode
|
||||
wasm.setWasmPaths('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/');
|
||||
await tf.setBackend('wasm');
|
||||
await tf.ready();
|
||||
...
|
||||
```
|
||||
|
||||
If you want to use graphical functions inside NodeJS,
|
||||
|
@ -310,12 +317,14 @@ faceapi.env.monkeyPatch({ Canvas, Image, ImageData })
|
|||
|
||||
## Weights
|
||||
|
||||
Pretrained models and their weights are includes in `./model`.
|
||||
Pretrained models and their weights are included in `./model`.
|
||||
|
||||
<br><hr><br>
|
||||
|
||||
## Test & Dev Web Server
|
||||
|
||||
To install development dependencies, use `npm install --production=false`
|
||||
|
||||
Built-in test&dev web server can be started using
|
||||
|
||||
```shell
|
||||
|
@ -327,16 +336,47 @@ By default it starts HTTP server on port 8000 and HTTPS server on port 8001 and
|
|||
- <https://localhost:8001/demo/index.html>
|
||||
- <https://localhost:8001/demo/webcam.html>
|
||||
|
||||
```json
|
||||
2021-03-14 08:41:09 INFO: @vladmandic/face-api version 1.0.2
|
||||
2021-03-14 08:41:09 INFO: User: vlado Platform: linux Arch: x64 Node: v15.7.0
|
||||
2021-03-14 08:41:09 INFO: Build: file startup all target: es2018
|
||||
2021-03-14 08:41:09 STATE: HTTP server listening: 8000
|
||||
2021-03-14 08:41:09 STATE: HTTP2 server listening: 8001
|
||||
2021-03-14 08:41:09 STATE: Monitoring: [ 'package.json', 'demo', 'src', [length]: 3 ]
|
||||
2021-03-14 08:41:10 STATE: Build for: browserBundle type: tfjs: { modules: 1258, moduleBytes: 4040087, imports: 7, importBytes: 276, outputBytes: 1072314, outputFiles: 'dist/tfjs.esm.js' }
|
||||
2021-03-14 08:41:10 STATE: Build for: browserBundle type: iife: { imports: 160, importBytes: 1305679, outputBytes: 1151683, outputFiles: 'dist/face-api.js' }
|
||||
2021-03-14 08:41:10 STATE: Build for: browserBundle type: esm: { imports: 160, importBytes: 1305679, outputBytes: 1151520, outputFiles: 'dist/face-api.esm.js' }
|
||||
```js
|
||||
2022-01-14 09:56:19 INFO: @vladmandic/face-api version 1.6.4
|
||||
2022-01-14 09:56:19 INFO: User: vlado Platform: linux Arch: x64 Node: v17.2.0
|
||||
2022-01-14 09:56:19 INFO: Application: { name: '@vladmandic/face-api', version: '1.6.4' }
|
||||
2022-01-14 09:56:19 INFO: Environment: { profile: 'development', config: '.build.json', package: 'package.json', tsconfig: true, eslintrc: true, git: true }
|
||||
2022-01-14 09:56:19 INFO: Toolchain: { build: '0.6.7', esbuild: '0.14.11', typescript: '4.5.4', typedoc: '0.22.10', eslint: '8.6.0' }
|
||||
2022-01-14 09:56:19 INFO: Build: { profile: 'development', steps: [ 'serve', 'watch', 'compile' ] }
|
||||
2022-01-14 09:56:19 STATE: WebServer: { ssl: false, port: 8000, root: '.' }
|
||||
2022-01-14 09:56:19 STATE: WebServer: { ssl: true, port: 8001, root: '.', sslKey: 'build/cert/https.key', sslCrt: 'build/cert/https.crt' }
|
||||
2022-01-14 09:56:19 STATE: Watch: { locations: [ 'src/**', 'README.md', 'src/**', 'src/**' ] }
|
||||
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/cpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 143, outputBytes: 1276 }
|
||||
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/cpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node.js', files: 162, inputBytes: 234787, outputBytes: 175203 }
|
||||
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/gpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-gpu.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 147, outputBytes: 1296 }
|
||||
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/gpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-gpu.js', files: 162, inputBytes: 234807, outputBytes: 175219 }
|
||||
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/wasm', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-wasm.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 185, outputBytes: 1367 }
|
||||
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/wasm', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-wasm.js', files: 162, inputBytes: 234878, outputBytes: 175294 }
|
||||
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/tf-version', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-version.ts', output: 'dist/tfjs.version.js', files: 1, inputBytes: 1063, outputBytes: 1662 }
|
||||
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 2, inputBytes: 2172, outputBytes: 811 }
|
||||
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm-nobundle.js', files: 162, inputBytes: 234322, outputBytes: 169437 }
|
||||
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 11, inputBytes: 2172, outputBytes: 2444105 }
|
||||
2022-01-14 09:56:20 STATE: Compile: { name: 'faceapi/browser/iife/bundle', format: 'iife', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.js', files: 162, inputBytes: 2677616, outputBytes: 1252572 }
|
||||
2022-01-14 09:56:20 STATE: Compile: { name: 'faceapi/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm.js', files: 162, inputBytes: 2677616, outputBytes: 2435063 }
|
||||
2022-01-14 09:56:20 INFO: Listening...
|
||||
...
|
||||
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/html', size: 1047, url: '/', remote: '::1' }
|
||||
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/javascript', size: 6919, url: '/index.js', remote: '::1' }
|
||||
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/javascript', size: 2435063, url: '/dist/face-api.esm.js', remote: '::1' }
|
||||
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 4125244, url: '/dist/face-api.esm.js.map', remote: '::1' }
|
||||
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 3219, url: '/model/tiny_face_detector_model-weights_manifest.json', remote: '::1' }
|
||||
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 193321, url: '/model/tiny_face_detector_model.bin', remote: '::1' }
|
||||
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 28233, url: '/model/ssd_mobilenetv1_model-weights_manifest.json', remote: '::1' }
|
||||
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 5616957, url: '/model/ssd_mobilenetv1_model.bin', remote: '::1' }
|
||||
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 8392, url: '/model/age_gender_model-weights_manifest.json', remote: '::1' }
|
||||
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 429708, url: '/model/age_gender_model.bin', remote: '::1' }
|
||||
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 8485, url: '/model/face_landmark_68_model-weights_manifest.json', remote: '::1' }
|
||||
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 356840, url: '/model/face_landmark_68_model.bin', remote: '::1' }
|
||||
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 19615, url: '/model/face_recognition_model-weights_manifest.json', remote: '::1' }
|
||||
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 6444032, url: '/model/face_recognition_model.bin', remote: '::1' }
|
||||
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 6980, url: '/model/face_expression_model-weights_manifest.json', remote: '::1' }
|
||||
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 329468, url: '/model/face_expression_model.bin', remote: '::1' }
|
||||
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'image/jpeg', size: 144516, url: '/sample1.jpg', remote: '::1' }
|
||||
```
|
||||
|
||||
<br><hr><br>
|
||||
|
@ -360,35 +400,41 @@ cd face-api
|
|||
Then install all dependencies and run rebuild:
|
||||
|
||||
```shell
|
||||
npm install
|
||||
npm install --production=false
|
||||
npm run build
|
||||
```
|
||||
|
||||
Build process uses script `build.js` that creates optimized build for each target:
|
||||
Build process uses `@vladmandic/build` module that creates optimized build for each target:
|
||||
|
||||
```text
|
||||
> @vladmandic/face-api@1.0.2 build
|
||||
> rimraf dist/* types/* typedoc/* && node server/build.js
|
||||
```
|
||||
```js
|
||||
> @vladmandic/face-api@1.7.1 build /home/vlado/dev/face-api
|
||||
> node build.js
|
||||
|
||||
```json
|
||||
2021-03-14 08:39:21 INFO: @vladmandic/face-api version 1.0.2
|
||||
2021-03-14 08:39:21 INFO: User: vlado Platform: linux Arch: x64 Node: v15.7.0
|
||||
2021-03-14 08:39:21 INFO: Build: file startup all target: es2018
|
||||
2021-03-14 08:39:21 STATE: Build for: node type: tfjs: { imports: 1, importBytes: 143, outputBytes: 731, outputFiles: 'dist/tfjs.esm.js' }
|
||||
2021-03-14 08:39:21 STATE: Build for: node type: node: { imports: 160, importBytes: 234096, outputBytes: 85371, outputFiles: 'dist/face-api.node.js' }
|
||||
2021-03-14 08:39:21 STATE: Build for: nodeGPU type: tfjs: { imports: 1, importBytes: 147, outputBytes: 735, outputFiles: 'dist/tfjs.esm.js' }
|
||||
2021-03-14 08:39:21 STATE: Build for: nodeGPU type: node: { imports: 160, importBytes: 234100, outputBytes: 85379, outputFiles: 'dist/face-api.node-gpu.js' }
|
||||
2021-03-14 08:39:21 STATE: Build for: nodeCPU type: tfjs: { imports: 1, importBytes: 138, outputBytes: 726, outputFiles: 'dist/tfjs.esm.js' }
|
||||
2021-03-14 08:39:21 STATE: Build for: nodeCPU type: node: { imports: 160, importBytes: 234091, outputBytes: 85370, outputFiles: 'dist/face-api.node-cpu.js' }
|
||||
2021-03-14 08:39:21 STATE: Build for: browserNoBundle type: tfjs: { imports: 1, importBytes: 276, outputBytes: 244, outputFiles: 'dist/tfjs.esm.js' }
|
||||
2021-03-14 08:39:21 STATE: Build for: browserNoBundle type: esm: { imports: 160, importBytes: 233609, outputBytes: 82634, outputFiles: 'dist/face-api.esm-nobundle.js' }
|
||||
2021-03-14 08:39:22 STATE: Build for: browserBundle type: tfjs: { modules: 1258, moduleBytes: 4040087, imports: 7, importBytes: 276, outputBytes: 1072314, outputFiles: 'dist/tfjs.esm.js' }
|
||||
2021-03-14 08:39:22 STATE: Build for: browserBundle type: iife: { imports: 160, importBytes: 1305679, outputBytes: 1151683, outputFiles: 'dist/face-api.js' }
|
||||
2021-03-14 08:39:22 STATE: Build for: browserBundle type: esm: { imports: 160, importBytes: 1305679, outputBytes: 1151520, outputFiles: 'dist/face-api.esm.js' }
|
||||
2021-03-14 08:39:22 INFO: Compile typings: [ 'src/index.ts', [length]: 1 ]
|
||||
2021-03-14 08:39:27 INFO: Update Change log: [ '/home/vlado/dev/face-api/CHANGELOG.md', [length]: 1 ]
|
||||
2021-03-14 08:39:27 INFO: Generate TypeDocs: [ 'src/index.ts', [length]: 1 ]
|
||||
2022-07-25 08:21:05 INFO: Application: { name: '@vladmandic/face-api', version: '1.7.1' }
|
||||
2022-07-25 08:21:05 INFO: Environment: { profile: 'production', config: '.build.json', package: 'package.json', tsconfig: true, eslintrc: true, git: true }
|
||||
2022-07-25 08:21:05 INFO: Toolchain: { build: '0.7.7', esbuild: '0.14.50', typescript: '4.7.4', typedoc: '0.23.9', eslint: '8.20.0' }
|
||||
2022-07-25 08:21:05 INFO: Build: { profile: 'production', steps: [ 'clean', 'compile', 'typings', 'typedoc', 'lint', 'changelog' ] }
|
||||
2022-07-25 08:21:05 STATE: Clean: { locations: [ 'dist/*', 'typedoc/*', 'types/lib/src' ] }
|
||||
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/cpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 143, outputBytes: 614 }
|
||||
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/cpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node.js', files: 162, inputBytes: 234137, outputBytes: 85701 }
|
||||
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/gpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-gpu.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 147, outputBytes: 618 }
|
||||
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/gpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-gpu.js', files: 162, inputBytes: 234141, outputBytes: 85705 }
|
||||
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/wasm', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-wasm.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 185, outputBytes: 670 }
|
||||
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/wasm', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-wasm.js', files: 162, inputBytes: 234193, outputBytes: 85755 }
|
||||
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/tf-version', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-version.ts', output: 'dist/tfjs.version.js', files: 1, inputBytes: 1063, outputBytes: 400 }
|
||||
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 2, inputBytes: 910, outputBytes: 527 }
|
||||
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm-nobundle.js', files: 162, inputBytes: 234050, outputBytes: 82787 }
|
||||
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 11, inputBytes: 910, outputBytes: 1184871 }
|
||||
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/iife/bundle', format: 'iife', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.js', files: 162, inputBytes: 1418394, outputBytes: 1264631 }
|
||||
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm.js', files: 162, inputBytes: 1418394, outputBytes: 1264150 }
|
||||
2022-07-25 08:21:07 STATE: Typings: { input: 'src/index.ts', output: 'types/lib', files: 93 }
|
||||
2022-07-25 08:21:09 STATE: TypeDoc: { input: 'src/index.ts', output: 'typedoc', objects: 154, generated: true }
|
||||
2022-07-25 08:21:13 STATE: Lint: { locations: [ 'src/' ], files: 174, errors: 0, warnings: 0 }
|
||||
2022-07-25 08:21:14 STATE: ChangeLog: { repository: 'https://github.com/vladmandic/face-api', branch: 'master', output: 'CHANGELOG.md' }
|
||||
2022-07-25 08:21:14 INFO: Done...
|
||||
2022-07-25 08:21:14 STATE: Copy: { input: 'types/lib/dist/tfjs.esm.d.ts' }
|
||||
2022-07-25 08:21:15 STATE: API-Extractor: { succeeeded: true, errors: 0, warnings: 417 }
|
||||
2022-07-25 08:21:15 INFO: FaceAPI Build complete...
|
||||
```
|
||||
|
||||
<br><hr><br>
|
||||
|
@ -403,18 +449,14 @@ Build process uses script `build.js` that creates optimized build for each targe
|
|||
|
||||
## Note
|
||||
|
||||
This is updated **face-api.js** with latest available TensorFlow/JS as the original is not compatible with **tfjs 2.0+**.
|
||||
This is updated **face-api.js** with latest available TensorFlow/JS as the original is not compatible with **tfjs >=2.0**.
|
||||
Forked from [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2** which was released on March 22nd, 2020
|
||||
|
||||
Currently based on **`TensorFlow/JS` 3.6.0**
|
||||
*Why?* I needed a FaceAPI that does not cause version conflict with newer versions of TensorFlow
|
||||
And since the original FaceAPI was open-source, I've released this version as well
|
||||
|
||||
*Why?* I needed FaceAPI that does not cause version conflict with newer versions of TensorFlow
|
||||
And since original FaceAPI was open-source, I've released this version as well
|
||||
|
||||
Changes ended up being too large for a simple pull request
|
||||
and it ended up being a full-fledged version on its own
|
||||
|
||||
Plus many features were added since original inception
|
||||
Changes ended up being too large for a simple pull request and it ended up being a full-fledged version on its own
|
||||
Plus many features were added since the original inception
|
||||
|
||||
Although a lot of work has gone into this version of `FaceAPI` and it will continue to be maintained,
|
||||
at this time it is completely superseded by my newer library `Human` which covers the same use cases,
|
||||
|
@ -429,29 +471,34 @@ but extends it with newer AI models, additional detection details, compatibility
|
|||
|
||||
Compared to [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2**:
|
||||
|
||||
- Compatible with `TensorFlow/JS 2.0+ & 3.0+`
|
||||
- Compatible with `WebGL`, `CPU` and `WASM` TFJS Browser backends
|
||||
- Compatible with both `tfjs-node` and `tfjs-node-gpu` TFJS NodeJS backends
|
||||
- Updated all type castings for TypeScript type checking to `TypeScript 4.2`
|
||||
- Switched bundling from `UMD` to `ESM` + `CommonJS` with fallback to `IIFE`
|
||||
Resulting code is optimized per-platform instead of being universal
|
||||
Fully tree shakable when imported as an `ESM` module
|
||||
Browser bundle process uses `ESBuild` instead of `Rollup`
|
||||
- Typescript build process now targets `ES2018` and instead of dual `ES5`/`ES6`
|
||||
Resulting code is clean ES2018 JavaScript without polyfills
|
||||
- Removed old tests, docs, examples
|
||||
- Removed old package dependencies (`karma`, `jasmine`, `babel`, etc.)
|
||||
- Updated all package dependencies
|
||||
- Updated TensorFlow/JS dependencies since backends were removed from `@tensorflow/tfjs-core`
|
||||
- Updated `mobileNetv1` model due to `batchNorm()` dependency
|
||||
- Added `version` class that returns JSON object with version of FaceAPI as well as linked TFJS
|
||||
- Added test/dev built-in HTTP & HTTPS Web server
|
||||
- Removed `mtcnn` and `tinyYolov2` models as they were non-functional in latest public version of `FaceAPI`
|
||||
Which means valid models are **tinyFaceDetector** and **mobileNetv1**
|
||||
*If there is a demand, I can re-implement them back.*
|
||||
- Added `face angle` calculations that returns `roll`, `yaw` and `pitch`
|
||||
- Added `typdoc` automatic API specification generation during build
|
||||
- Added `changelog` automatic generation during build
|
||||
- Compatible with `TensorFlow/JS 2.0+, 3.0+ and 4.0+`
|
||||
Currently using **`TensorFlow/JS` 4.16**
|
||||
Original `face-api.js` is based on `TFJS` **1.7.4**
|
||||
- Compatible with `WebGL`, `CPU` and `WASM` TFJS Browser backends
|
||||
- Compatible with both `tfjs-node` and `tfjs-node-gpu` TFJS NodeJS backends
|
||||
- Updated all type castings for TypeScript type checking to `TypeScript 5.3`
|
||||
- Switched bundling from `UMD` to `ESM` + `CommonJS` with fallback to `IIFE`
|
||||
Resulting code is optimized per-platform instead of being universal
|
||||
Fully tree shakable when imported as an `ESM` module
|
||||
Browser bundle process uses `ESBuild` instead of `Rollup`
|
||||
- Added separate `face-api` versions with `tfjs` pre-bundled and without `tfjs`
|
||||
When using `-nobundle` version, user can load any version of `tfjs` manually
|
||||
- Typescript build process now targets `ES2018` and instead of dual `ES5`/`ES6`
|
||||
Resulting code is clean ES2018 JavaScript without polyfills
|
||||
- Removed old tests, docs, examples
|
||||
- Removed old package dependencies (`karma`, `jasmine`, `babel`, etc.)
|
||||
- Updated all package dependencies
|
||||
- Updated TensorFlow/JS dependencies since backends were removed from `@tensorflow/tfjs-core`
|
||||
- Updated `mobileNetv1` model due to `batchNorm()` dependency
|
||||
- Added `version` class that returns JSON object with version of FaceAPI as well as linked TFJS
|
||||
- Added test/dev built-in HTTP & HTTPS Web server
|
||||
- Removed `mtcnn` and `tinyYolov2` models as they were non-functional in latest public version of `FaceAPI`
|
||||
Which means valid models are **tinyFaceDetector** and **mobileNetv1**
|
||||
*If there is a demand, I can re-implement them back.*
|
||||
- Added `face angle` calculations that returns `roll`, `yaw` and `pitch`
|
||||
- Added `typdoc` automatic API specification generation during build
|
||||
- Added `changelog` automatic generation during build
|
||||
- New process to generate **TypeDocs** bundle using API-Extractor
|
||||
|
||||
<br>
|
||||
|
||||
|
|
|
@ -0,0 +1,38 @@
|
|||
{
|
||||
"$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json",
|
||||
"mainEntryPointFilePath": "types/lib/src/index.d.ts",
|
||||
"bundledPackages": ["@tensorflow/tfjs-core", "@tensorflow/tfjs-converter", "@types/offscreencanvas"],
|
||||
"compiler": {
|
||||
"skipLibCheck": false
|
||||
},
|
||||
"newlineKind": "lf",
|
||||
"dtsRollup": {
|
||||
"enabled": true,
|
||||
"untrimmedFilePath": "types/face-api.d.ts"
|
||||
},
|
||||
"docModel": { "enabled": false },
|
||||
"tsdocMetadata": {
|
||||
"enabled": false
|
||||
},
|
||||
"apiReport": { "enabled": false },
|
||||
"messages": {
|
||||
"compilerMessageReporting": {
|
||||
"default": {
|
||||
"logLevel": "warning"
|
||||
}
|
||||
},
|
||||
"extractorMessageReporting": {
|
||||
"default": {
|
||||
"logLevel": "warning"
|
||||
},
|
||||
"ae-missing-release-tag": {
|
||||
"logLevel": "none"
|
||||
}
|
||||
},
|
||||
"tsdocMessageReporting": {
|
||||
"default": {
|
||||
"logLevel": "warning"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,77 @@
|
|||
const fs = require('fs');
|
||||
const log = require('@vladmandic/pilogger');
|
||||
const Build = require('@vladmandic/build').Build;
|
||||
const APIExtractor = require('@microsoft/api-extractor');
|
||||
|
||||
const regEx = [
|
||||
{ search: 'types="@webgpu/types/dist"', replace: 'path="../src/types/webgpu.d.ts"' },
|
||||
{ search: 'types="offscreencanvas"', replace: 'path="../src/types/offscreencanvas.d.ts"' },
|
||||
];
|
||||
|
||||
function copyFile(src, dst) {
|
||||
if (!fs.existsSync(src)) {
|
||||
log.warn('Copy:', { input: src, output: dst });
|
||||
return;
|
||||
}
|
||||
log.state('Copy:', { input: src, output: dst });
|
||||
const buffer = fs.readFileSync(src);
|
||||
fs.writeFileSync(dst, buffer);
|
||||
}
|
||||
|
||||
function writeFile(str, dst) {
|
||||
log.state('Write:', { output: dst });
|
||||
fs.writeFileSync(dst, str);
|
||||
}
|
||||
|
||||
function regExFile(src, entries) {
|
||||
if (!fs.existsSync(src)) {
|
||||
log.warn('Filter:', { src });
|
||||
return;
|
||||
}
|
||||
log.state('Filter:', { input: src });
|
||||
for (const entry of entries) {
|
||||
const buffer = fs.readFileSync(src, 'UTF-8');
|
||||
const lines = buffer.split(/\r?\n/);
|
||||
const out = [];
|
||||
for (const line of lines) {
|
||||
if (line.includes(entry.search)) out.push(line.replace(entry.search, entry.replace));
|
||||
else out.push(line);
|
||||
}
|
||||
fs.writeFileSync(src, out.join('\n'));
|
||||
}
|
||||
}
|
||||
|
||||
const apiIgnoreList = ['ae-forgotten-export', 'ae-unresolved-link', 'tsdoc-param-tag-missing-hyphen'];
|
||||
|
||||
async function main() {
|
||||
// run production build
|
||||
const build = new Build();
|
||||
await build.run('production');
|
||||
// patch tfjs typedefs
|
||||
log.state('Copy:', { input: 'types/lib/dist/tfjs.esm.d.ts' });
|
||||
copyFile('types/lib/dist/tfjs.esm.d.ts', 'dist/tfjs.esm.d.ts');
|
||||
// run api-extractor to create typedef rollup
|
||||
const extractorConfig = APIExtractor.ExtractorConfig.loadFileAndPrepare('api-extractor.json');
|
||||
const extractorResult = APIExtractor.Extractor.invoke(extractorConfig, {
|
||||
localBuild: true,
|
||||
showVerboseMessages: false,
|
||||
messageCallback: (msg) => {
|
||||
msg.handled = true;
|
||||
if (msg.logLevel === 'none' || msg.logLevel === 'verbose' || msg.logLevel === 'info') return;
|
||||
if (msg.sourceFilePath?.includes('/node_modules/')) return;
|
||||
if (apiIgnoreList.reduce((prev, curr) => prev || msg.messageId.includes(curr), false)) return;
|
||||
log.data('API', { level: msg.logLevel, category: msg.category, id: msg.messageId, file: msg.sourceFilePath, line: msg.sourceFileLine, text: msg.text });
|
||||
},
|
||||
});
|
||||
log.state('API-Extractor:', { succeeeded: extractorResult.succeeded, errors: extractorResult.errorCount, warnings: extractorResult.warningCount });
|
||||
regExFile('types/face-api.d.ts', regEx);
|
||||
writeFile('export * from \'../types/face-api\';', 'dist/face-api.esm-nobundle.d.ts');
|
||||
writeFile('export * from \'../types/face-api\';', 'dist/face-api.esm.d.ts');
|
||||
writeFile('export * from \'../types/face-api\';', 'dist/face-api.d.ts');
|
||||
writeFile('export * from \'../types/face-api\';', 'dist/face-api.node.d.ts');
|
||||
writeFile('export * from \'../types/face-api\';', 'dist/face-api.node-gpu.d.ts');
|
||||
writeFile('export * from \'../types/face-api\';', 'dist/face-api.node-wasm.d.ts');
|
||||
log.info('FaceAPI Build complete...');
|
||||
}
|
||||
|
||||
main();
|
|
@ -11,7 +11,7 @@
|
|||
<link rel="shortcut icon" href="../favicon.ico" type="image/x-icon">
|
||||
<script src="./index.js" type="module"></script>
|
||||
</head>
|
||||
<body style="font-family: monospace; background: black; color: white; font-size: 16px; line-height: 22px; margin: 0;">
|
||||
<body style="font-family: monospace; background: black; color: white; font-size: 16px; line-height: 22px; margin: 0; overflow-x: hidden;">
|
||||
<div id="log"></div>
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
@ -1,25 +1,25 @@
|
|||
import * as faceapi from '../dist/face-api.esm.js';
|
||||
/**
|
||||
* FaceAPI Demo for Browsers
|
||||
* Loaded via `index.html`
|
||||
*/
|
||||
|
||||
import * as faceapi from '../dist/face-api.esm.js'; // use when in dev mode
|
||||
// import * as faceapi from '@vladmandic/face-api'; // use when downloading face-api as npm
|
||||
|
||||
// configuration options
|
||||
const modelPath = '../model/'; // path to model folder that will be loaded using http
|
||||
// const modelPath = 'https://vladmandic.github.io/face-api/model/'; // path to model folder that will be loaded using http
|
||||
// const modelPath = 'https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model/'; // path to model folder that will be loaded using http
|
||||
const imgSize = 800; // maximum image size in pixels
|
||||
const minScore = 0.3; // minimum score
|
||||
const maxResults = 10; // maximum number of results to return
|
||||
const samples = ['sample1.jpg', 'sample2.jpg', 'sample3.jpg', 'sample4.jpg', 'sample5.jpg', 'sample6.jpg']; // sample images to be loaded using http
|
||||
|
||||
// helper function to pretty-print json object to string
|
||||
function str(json) {
|
||||
let text = '<font color="lightblue">';
|
||||
text += json ? JSON.stringify(json).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ') : '';
|
||||
text += '</font>';
|
||||
return text;
|
||||
}
|
||||
const str = (json) => (json ? JSON.stringify(json).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ') : '');
|
||||
|
||||
// helper function to print strings to html document as a log
|
||||
function log(...txt) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(...txt);
|
||||
console.log(...txt); // eslint-disable-line no-console
|
||||
const div = document.getElementById('log');
|
||||
if (div) div.innerHTML += `<br>${txt}`;
|
||||
}
|
||||
|
@ -33,11 +33,9 @@ function faces(name, title, id, data) {
|
|||
canvas.style.position = 'absolute';
|
||||
canvas.style.left = `${img.offsetLeft}px`;
|
||||
canvas.style.top = `${img.offsetTop}px`;
|
||||
// @ts-ignore
|
||||
canvas.width = img.width;
|
||||
// @ts-ignore
|
||||
canvas.height = img.height;
|
||||
const ctx = canvas.getContext('2d');
|
||||
const ctx = canvas.getContext('2d', { willReadFrequently: true });
|
||||
if (!ctx) return;
|
||||
// draw title
|
||||
ctx.font = '1rem sans-serif';
|
||||
|
@ -53,6 +51,7 @@ function faces(name, title, id, data) {
|
|||
ctx.beginPath();
|
||||
ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height);
|
||||
ctx.stroke();
|
||||
// draw text labels
|
||||
ctx.globalAlpha = 1;
|
||||
ctx.fillText(`${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 18);
|
||||
ctx.fillText(`${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 2);
|
||||
|
@ -72,8 +71,7 @@ function faces(name, title, id, data) {
|
|||
|
||||
// helper function to draw processed image and its results
|
||||
function print(title, img, data) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log('Results:', title, img, data);
|
||||
console.log('Results:', title, img, data); // eslint-disable-line no-console
|
||||
const el = new Image();
|
||||
el.id = Math.floor(Math.random() * 100000).toString();
|
||||
el.src = img;
|
||||
|
@ -96,7 +94,7 @@ async function image(url) {
|
|||
const canvas = document.createElement('canvas');
|
||||
canvas.height = img.height;
|
||||
canvas.width = img.width;
|
||||
const ctx = canvas.getContext('2d');
|
||||
const ctx = canvas.getContext('2d', { willReadFrequently: true });
|
||||
if (ctx) ctx.drawImage(img, 0, 0, img.width, img.height);
|
||||
// return generated canvas to be used by tfjs during detection
|
||||
resolve(canvas);
|
||||
|
@ -111,18 +109,23 @@ async function main() {
|
|||
log('FaceAPI Test');
|
||||
|
||||
// if you want to use wasm backend location for wasm binaries must be specified
|
||||
// await faceapi.tf.setWasmPaths('../node_modules/@tensorflow/tfjs-backend-wasm/dist/');
|
||||
// await faceapi.tf.setBackend('wasm');
|
||||
// await faceapi.tf?.setWasmPaths(`https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${faceapi.tf.version_core}/dist/`);
|
||||
// await faceapi.tf?.setBackend('wasm');
|
||||
// log(`WASM SIMD: ${await faceapi.tf?.env().getAsync('WASM_HAS_SIMD_SUPPORT')} Threads: ${await faceapi.tf?.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT') ? 'Multi' : 'Single'}`);
|
||||
|
||||
// default is webgl backend
|
||||
await faceapi.tf.setBackend('webgl');
|
||||
await faceapi.tf.ready();
|
||||
|
||||
// tfjs optimizations
|
||||
if (faceapi.tf?.env().flagRegistry.CANVAS2D_WILL_READ_FREQUENTLY) faceapi.tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
|
||||
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
|
||||
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
|
||||
await faceapi.tf.enableProdMode();
|
||||
await faceapi.tf.ENV.set('DEBUG', false);
|
||||
await faceapi.tf.ready();
|
||||
|
||||
// check version
|
||||
log(`Version: FaceAPI ${str(faceapi?.version.faceapi || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`);
|
||||
log(`Version: FaceAPI ${str(faceapi?.version || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`);
|
||||
log(`Flags: ${JSON.stringify(faceapi?.tf?.ENV.flags || { tf: 'not loaded' })}`);
|
||||
|
||||
// load face-api models
|
||||
|
@ -140,16 +143,9 @@ async function main() {
|
|||
const engine = await faceapi.tf.engine();
|
||||
log(`TF Engine State: ${str(engine.state)}`);
|
||||
|
||||
// const testT = faceapi.tf.tensor([0]);
|
||||
// const testF = testT.toFloat();
|
||||
// console.log(testT.print(), testF.print());
|
||||
// testT.dispose();
|
||||
// testF.dispose();
|
||||
|
||||
// loop through all images and try to process them
|
||||
log(`Start processing: ${samples.length} images ...<br>`);
|
||||
for (const img of samples) {
|
||||
// new line
|
||||
document.body.appendChild(document.createElement('br'));
|
||||
// load and resize image
|
||||
const canvas = await image(img);
|
||||
|
@ -163,7 +159,7 @@ async function main() {
|
|||
.withFaceDescriptors()
|
||||
.withAgeAndGender();
|
||||
// print results to screen
|
||||
print('TinyFace Detector', img, dataTinyYolo);
|
||||
print('TinyFace:', img, dataTinyYolo);
|
||||
// actual model execution
|
||||
const dataSSDMobileNet = await faceapi
|
||||
.detectAllFaces(canvas, optionsSSDMobileNet)
|
||||
|
@ -172,11 +168,9 @@ async function main() {
|
|||
.withFaceDescriptors()
|
||||
.withAgeAndGender();
|
||||
// print results to screen
|
||||
print('SSD MobileNet', img, dataSSDMobileNet);
|
||||
print('SSDMobileNet:', img, dataSSDMobileNet);
|
||||
} catch (err) {
|
||||
log(`Image: ${img} Error during processing ${str(err)}`);
|
||||
// eslint-disable-next-line no-console
|
||||
console.error(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,15 +1,20 @@
|
|||
// @ts-nocheck
|
||||
/**
|
||||
* FaceAPI Demo for NodeJS
|
||||
* - Uses external library [canvas](https://www.npmjs.com/package/canvas) to decode image
|
||||
* - Loads image from provided param
|
||||
* - Outputs results to console
|
||||
*/
|
||||
|
||||
// canvas library provides full canvas (load/draw/write) functionality for nodejs
|
||||
// must be installed manually as it just a demo dependency and not actual face-api dependency
|
||||
const canvas = require('canvas'); // eslint-disable-line node/no-missing-require
|
||||
const fs = require('fs');
|
||||
const process = require('process');
|
||||
const path = require('path');
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
|
||||
const process = require('process');
|
||||
const log = require('@vladmandic/pilogger');
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require, no-unused-vars
|
||||
const tf = require('@tensorflow/tfjs-node');
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
|
||||
const canvas = require('canvas');
|
||||
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
|
||||
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
|
||||
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
|
||||
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
|
||||
|
||||
const modelPathRoot = '../model';
|
||||
const imgPathRoot = './demo'; // modify to include your sample images
|
||||
|
@ -52,11 +57,9 @@ async function main() {
|
|||
faceapi.env.monkeyPatch({ Canvas: canvas.Canvas, Image: canvas.Image, ImageData: canvas.ImageData });
|
||||
|
||||
await faceapi.tf.setBackend('tensorflow');
|
||||
await faceapi.tf.enableProdMode();
|
||||
await faceapi.tf.ENV.set('DEBUG', false);
|
||||
await faceapi.tf.ready();
|
||||
|
||||
log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf?.getBackend()}`);
|
||||
log.state(`Version: FaceAPI ${faceapi.version} TensorFlow/JS ${tf.version_core} Backend: ${faceapi.tf?.getBackend()}`);
|
||||
|
||||
log.info('Loading FaceAPI models');
|
||||
const modelPath = path.join(__dirname, modelPathRoot);
|
||||
|
@ -80,7 +83,7 @@ async function main() {
|
|||
for (const face of result) print(face);
|
||||
}
|
||||
const t1 = process.hrtime.bigint();
|
||||
log.info('Processed', numImages, 'images in', Math.trunc(parseInt(t1 - t0) / 1000 / 1000), 'ms');
|
||||
log.info('Processed', numImages, 'images in', Math.trunc(Number((t1 - t0).toString()) / 1000 / 1000), 'ms');
|
||||
} else {
|
||||
const param = process.argv[2];
|
||||
if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {
|
||||
|
|
|
@ -0,0 +1,35 @@
|
|||
/**
|
||||
* FaceAPI demo that loads two images and finds similarity most prominant face in each image
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const tf = require('@tensorflow/tfjs-node');
|
||||
const faceapi = require('../dist/face-api.node');
|
||||
|
||||
let optionsSSDMobileNet;
|
||||
|
||||
const getDescriptors = async (imageFile) => {
|
||||
const buffer = fs.readFileSync(imageFile);
|
||||
const tensor = tf.node.decodeImage(buffer, 3);
|
||||
const faces = await faceapi.detectAllFaces(tensor, optionsSSDMobileNet)
|
||||
.withFaceLandmarks()
|
||||
.withFaceDescriptors();
|
||||
tf.dispose(tensor);
|
||||
return faces.map((face) => face.descriptor);
|
||||
};
|
||||
|
||||
const main = async (file1, file2) => {
|
||||
console.log('input images:', file1, file2); // eslint-disable-line no-console
|
||||
await tf.ready();
|
||||
await faceapi.nets.ssdMobilenetv1.loadFromDisk('model');
|
||||
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.5, maxResults: 1 });
|
||||
await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
|
||||
await faceapi.nets.faceRecognitionNet.loadFromDisk('model');
|
||||
const desc1 = await getDescriptors(file1);
|
||||
const desc2 = await getDescriptors(file2);
|
||||
const distance = faceapi.euclideanDistance(desc1[0], desc2[0]); // only compare first found face in each image
|
||||
console.log('distance between most prominant detected faces:', distance); // eslint-disable-line no-console
|
||||
console.log('similarity between most prominant detected faces:', 1 - distance); // eslint-disable-line no-console
|
||||
};
|
||||
|
||||
main('demo/sample1.jpg', 'demo/sample2.jpg');
|
|
@ -1,31 +1,54 @@
|
|||
/**
|
||||
* FaceAPI Demo for NodeJS
|
||||
* - Uses external library [@canvas/image](https://www.npmjs.com/package/@canvas/image) to decode image
|
||||
* - Loads image from provided param
|
||||
* - Outputs results to console
|
||||
*/
|
||||
|
||||
// @canvas/image can decode jpeg, png, webp
|
||||
// must be installed manually as it just a demo dependency and not actual face-api dependency
|
||||
const image = require('@canvas/image'); // eslint-disable-line node/no-missing-require
|
||||
const fs = require('fs');
|
||||
const tf = require('@tensorflow/tfjs-node');
|
||||
const image = require('@canvas/image'); // @canvas/image can decode jpeg, png, webp
|
||||
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
|
||||
const log = require('@vladmandic/pilogger');
|
||||
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
|
||||
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
|
||||
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
|
||||
|
||||
const modelPath = 'model/';
|
||||
const imageFile = 'demo/sample1.jpg';
|
||||
const ssdOptions = { minConfidence: 0.1, maxResults: 10 };
|
||||
|
||||
async function main() {
|
||||
log.header();
|
||||
const buffer = fs.readFileSync(imageFile); // read image from disk
|
||||
const canvas = await image.imageFromBuffer(buffer); // decode to canvas
|
||||
const imageData = image.getImageData(canvas); // read decoded image data from canvas
|
||||
console.log('image:', imageFile, canvas.width, canvas.height);
|
||||
log.info('image:', imageFile, canvas.width, canvas.height);
|
||||
|
||||
const tensor = tf.tidy(() => { // create tensor from image data
|
||||
const data = tf.tensor(Array.from(imageData.data), [canvas.height, canvas.width, 4], 'int32'); // create rgba image tensor from flat array and flip to height x width
|
||||
const data = tf.tensor(Array.from(imageData?.data || []), [canvas.height, canvas.width, 4], 'int32'); // create rgba image tensor from flat array and flip to height x width
|
||||
const channels = tf.split(data, 4, 2); // split rgba to channels
|
||||
const rgb = tf.stack([channels[0], channels[1], channels[2]], 2); // stack channels back to rgb
|
||||
const reshape = tf.reshape(rgb, [1, canvas.height, canvas.width, 3]); // move extra dim from the end of tensor and use it as batch number instead
|
||||
return reshape;
|
||||
});
|
||||
console.log('tensor:', tensor.shape, tensor.size);
|
||||
log.info('tensor:', tensor.shape, tensor.size);
|
||||
|
||||
// load models
|
||||
await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
|
||||
await faceapi.nets.ageGenderNet.loadFromDisk(modelPath);
|
||||
await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
|
||||
await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
|
||||
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
|
||||
|
||||
await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath); // load basic model only
|
||||
const optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options(ssdOptions); // create options object
|
||||
const result = await faceapi.detectAllFaces(tensor, optionsSSDMobileNet); // run detection
|
||||
console.log('results:', result);
|
||||
const result = await faceapi // run detection
|
||||
.detectAllFaces(tensor, optionsSSDMobileNet)
|
||||
.withFaceLandmarks()
|
||||
.withFaceExpressions()
|
||||
.withFaceDescriptors()
|
||||
.withAgeAndGender();
|
||||
log.data('results:', result.length);
|
||||
}
|
||||
|
||||
main();
|
||||
|
|
|
@ -0,0 +1,84 @@
|
|||
/**
|
||||
* FaceAPI Demo for NodeJS
|
||||
* - Analyzes face descriptors from source (image file or folder containing multiple image files)
|
||||
* - Analyzes face descriptor from target
|
||||
* - Finds best match
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const log = require('@vladmandic/pilogger');
|
||||
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
|
||||
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
|
||||
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
|
||||
|
||||
let optionsSSDMobileNet;
|
||||
const minConfidence = 0.1;
|
||||
const distanceThreshold = 0.5;
|
||||
const modelPath = 'model';
|
||||
const labeledFaceDescriptors = [];
|
||||
|
||||
async function initFaceAPI() {
  // load every model this demo needs from disk, one after another,
  // then prepare detector options limited to the single best face per image
  const requiredModels = [
    faceapi.nets.ssdMobilenetv1,
    faceapi.nets.faceLandmark68Net,
    faceapi.nets.faceExpressionNet,
    faceapi.nets.faceRecognitionNet,
  ];
  for (const model of requiredModels) await model.loadFromDisk(modelPath);
  optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults: 1 });
}
|
||||
|
||||
async function getDescriptors(imageFile) {
  // read image from disk, run full face analysis, return one descriptor per detected face
  const buffer = fs.readFileSync(imageFile);
  const tensor = tf.node.decodeImage(buffer, 3); // decode to rgb tensor (3 channels)
  try {
    const faces = await faceapi.detectAllFaces(tensor, optionsSSDMobileNet)
      .withFaceLandmarks()
      .withFaceExpressions()
      .withFaceDescriptors();
    return faces.map((face) => face.descriptor);
  } finally {
    tf.dispose(tensor); // always release tensor memory, even when detection throws
  }
}
|
||||
|
||||
async function registerImage(inputFile) {
  // analyze a single image file and add each detected face descriptor to the global registry
  // only files with a supported image extension are processed; everything else is skipped
  // note: uses path.extname so names like 'myjpg' (no dot) are no longer accepted, and '.jpeg' now is
  const ext = path.extname(inputFile).toLowerCase();
  if (!['.jpg', '.jpeg', '.png', '.gif'].includes(ext)) return;
  log.data('Registered:', inputFile);
  const descriptors = await getDescriptors(inputFile);
  for (const descriptor of descriptors) {
    // label each descriptor with its source file name so a later match can be traced back
    const labeledFaceDescriptor = new faceapi.LabeledFaceDescriptors(inputFile, [descriptor]);
    labeledFaceDescriptors.push(labeledFaceDescriptor);
  }
}
|
||||
|
||||
async function findBestMatch(inputFile) {
  // compare every face found in the target image against all registered descriptors
  // and return the best match for each one
  const matcher = new faceapi.FaceMatcher(labeledFaceDescriptors, distanceThreshold);
  const targetDescriptors = await getDescriptors(inputFile);
  const results = [];
  for (const targetDescriptor of targetDescriptors) {
    results.push(await matcher.findBestMatch(targetDescriptor));
  }
  return results;
}
|
||||
|
||||
async function main() {
  // entry point: register face descriptors from <source image or folder>, then match <target image>
  log.header();
  if (process.argv.length !== 4) {
    log.error(process.argv[1], 'Expected <source image or folder> <target image>');
    process.exit(1);
  }
  await initFaceAPI();
  const input = process.argv[2];
  log.info('Input:', input);
  if (!fs.existsSync(input)) {
    // fail with a clean message instead of letting statSync throw on a bad path
    log.error('Input does not exist:', input);
    process.exit(1);
  }
  const stat = fs.statSync(input); // stat once and reuse for both checks
  if (stat.isFile()) {
    await registerImage(input); // register single image
  } else if (stat.isDirectory()) {
    const dir = fs.readdirSync(input);
    for (const f of dir) await registerImage(path.join(input, f)); // register all images in a folder
  }
  log.info('Comparing:', process.argv[3], 'Descriptors:', labeledFaceDescriptors.length);
  if (labeledFaceDescriptors.length > 0) {
    const bestMatch = await findBestMatch(process.argv[3]); // find best match to all registered images
    log.data('Match:', bestMatch);
  } else {
    log.warn('No registered faces');
  }
}
|
||||
|
||||
main();
|
|
@ -1,14 +1,16 @@
|
|||
// @ts-nocheck
|
||||
/**
|
||||
* FaceAPI Demo for NodeJS
|
||||
* - Used by `node-multiprocess.js`
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
|
||||
const log = require('@vladmandic/pilogger');
|
||||
|
||||
// workers actual import tfjs and faceapi modules
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
|
||||
const tf = require('@tensorflow/tfjs-node');
|
||||
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
|
||||
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
|
||||
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
|
||||
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
|
||||
|
||||
// options used by faceapi
|
||||
const modelPathRoot = '../model';
|
||||
|
@ -53,7 +55,7 @@ async function main() {
|
|||
await faceapi.tf.enableProdMode();
|
||||
await faceapi.tf.ENV.set('DEBUG', false);
|
||||
await faceapi.tf.ready();
|
||||
log.state('Worker: PID:', process.pid, `TensorFlow/JS ${faceapi.tf.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf.getBackend()}`);
|
||||
log.state('Worker: PID:', process.pid, `TensorFlow/JS ${faceapi.tf.version_core} FaceAPI ${faceapi.version} Backend: ${faceapi.tf.getBackend()}`);
|
||||
|
||||
// and load and initialize facepi models
|
||||
const modelPath = path.join(__dirname, modelPathRoot);
|
||||
|
|
|
@ -1,11 +1,14 @@
|
|||
// @ts-nocheck
|
||||
/**
|
||||
* FaceAPI Demo for NodeJS
|
||||
* - Starts multiple worker processes and uses them as worker pool to process all input images
|
||||
* - Images are enumerated in main process and sent for processing to worker processes via ipc
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
|
||||
const log = require('@vladmandic/pilogger'); // this is my simple logger with few extra features
|
||||
const child_process = require('child_process');
|
||||
// note that main process import faceapi or tfjs at all
|
||||
// note that main process does not need to import faceapi or tfjs at all as processing is done in a worker process
|
||||
|
||||
const imgPathRoot = './demo'; // modify to include your sample images
|
||||
const numWorkers = 4; // how many workers will be started
|
||||
|
@ -33,14 +36,14 @@ function waitCompletion() {
|
|||
if (activeWorkers > 0) setImmediate(() => waitCompletion());
|
||||
else {
|
||||
t[1] = process.hrtime.bigint();
|
||||
log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(parseInt(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(parseInt(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(parseInt(t[1] - t[2]) / numImages / 1000000), 'ms');
|
||||
log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(Number(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(Number(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(Number(t[1] - t[2]) / numImages / 1000000), 'ms');
|
||||
}
|
||||
}
|
||||
|
||||
function measureLatency() {
|
||||
t[3] = process.hrtime.bigint();
|
||||
const latencyInitialization = Math.trunc(parseInt(t[2] - t[0]) / 1000 / 1000);
|
||||
const latencyRoundTrip = Math.trunc(parseInt(t[3] - t[2]) / 1000 / 1000);
|
||||
const latencyInitialization = Math.trunc(Number(t[2] - t[0]) / 1000 / 1000);
|
||||
const latencyRoundTrip = Math.trunc(Number(t[3] - t[2]) / 1000 / 1000);
|
||||
log.info('Latency: worker initializtion: ', latencyInitialization, 'message round trip:', latencyRoundTrip);
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,31 @@
|
|||
/**
|
||||
* FaceAPI Demo for NodeJS
|
||||
* - Loads image
|
||||
* - Outputs results to console
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
|
||||
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
|
||||
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
|
||||
|
||||
async function main() {
  // minimal demo: load models, decode a sample image, run full face analysis, print results
  await faceapi.nets.ssdMobilenetv1.loadFromDisk('model'); // load models from a specific path
  await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
  await faceapi.nets.ageGenderNet.loadFromDisk('model');
  await faceapi.nets.faceRecognitionNet.loadFromDisk('model');
  await faceapi.nets.faceExpressionNet.loadFromDisk('model');
  const options = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.1, maxResults: 10 }); // set model options
  const buffer = fs.readFileSync('demo/sample1.jpg'); // load jpg image as binary
  const decodeT = faceapi.tf.node.decodeImage(buffer, 3); // decode binary buffer to rgb tensor
  const expandT = faceapi.tf.expandDims(decodeT, 0); // add batch dimension to tensor
  try {
    const result = await faceapi.detectAllFaces(expandT, options) // run detection
      .withFaceLandmarks()
      .withFaceExpressions()
      .withFaceDescriptors()
      .withAgeAndGender();
    console.log({ result }); // eslint-disable-line no-console
  } finally {
    faceapi.tf.dispose([decodeT, expandT]); // dispose tensors even if detection throws, to avoid memory leaks
  }
}
|
||||
|
||||
main();
|
|
@ -1,97 +1,53 @@
|
|||
// @ts-nocheck
|
||||
/**
|
||||
* FaceAPI Demo for NodeJS using WASM
|
||||
* - Loads WASM binaries from external CDN
|
||||
* - Loads image
|
||||
* - Outputs results to console
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const process = require('process');
|
||||
const path = require('path');
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
|
||||
const log = require('@vladmandic/pilogger');
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require, no-unused-vars
|
||||
const image = require('@canvas/image'); // eslint-disable-line node/no-missing-require
|
||||
const tf = require('@tensorflow/tfjs');
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
|
||||
require('@tensorflow/tfjs-backend-wasm');
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require, no-unused-vars
|
||||
const canvas = require('canvas');
|
||||
const faceapi = require('../dist/face-api.node-cpu.js'); // this is equivalent to '@vladmandic/faceapi'
|
||||
const wasm = require('@tensorflow/tfjs-backend-wasm');
|
||||
const faceapi = require('../dist/face-api.node-wasm.js'); // use this when using face-api in dev mode
|
||||
|
||||
const modelPathRoot = '../model';
|
||||
const imgPathRoot = './demo'; // modify to include your sample images
|
||||
const minConfidence = 0.15;
|
||||
const maxResults = 5;
|
||||
let optionsSSDMobileNet;
|
||||
|
||||
async function image(input) {
|
||||
const img = await canvas.loadImage(input);
|
||||
const c = canvas.createCanvas(img.width, img.height);
|
||||
const ctx = c.getContext('2d');
|
||||
ctx.drawImage(img, 0, 0, img.width, img.height);
|
||||
// const out = fs.createWriteStream('test.jpg');
|
||||
// const stream = c.createJPEGStream({ quality: 0.6, progressive: true, chromaSubsampling: true });
|
||||
// stream.pipe(out);
|
||||
return c;
|
||||
async function readImage(imageFile) {
|
||||
const buffer = fs.readFileSync(imageFile); // read image from disk
|
||||
const canvas = await image.imageFromBuffer(buffer); // decode to canvas
|
||||
const imageData = image.getImageData(canvas); // read decoded image data from canvas
|
||||
const tensor = tf.tidy(() => { // create tensor from image data
|
||||
const data = tf.tensor(Array.from(imageData?.data || []), [canvas.height, canvas.width, 4], 'int32'); // create rgba image tensor from flat array and flip to height x width
|
||||
const channels = tf.split(data, 4, 2); // split rgba to channels
|
||||
const rgb = tf.stack([channels[0], channels[1], channels[2]], 2); // stack channels back to rgb
|
||||
const squeeze = tf.squeeze(rgb); // move extra dim from the end of tensor and use it as batch number instead
|
||||
return squeeze;
|
||||
});
|
||||
console.log(`Image: ${imageFile} [${canvas.width} x ${canvas.height} Tensor: ${tensor.shape}, Size: ${tensor.size}`); // eslint-disable-line no-console
|
||||
return tensor;
|
||||
}
|
||||
|
||||
async function detect(tensor) {
|
||||
const result = await faceapi
|
||||
.detectAllFaces(tensor, optionsSSDMobileNet)
|
||||
async function main() {
|
||||
wasm.setWasmPaths('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/', true);
|
||||
await tf.setBackend('wasm');
|
||||
await tf.ready();
|
||||
console.log(`Version: FaceAPI ${faceapi.version} TensorFlow/JS ${tf.version_core} Backend: ${faceapi.tf.getBackend()}`); // eslint-disable-line no-console
|
||||
await faceapi.nets.ssdMobilenetv1.loadFromDisk('model'); // load models from a specific patch
|
||||
await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
|
||||
await faceapi.nets.ageGenderNet.loadFromDisk('model');
|
||||
await faceapi.nets.faceRecognitionNet.loadFromDisk('model');
|
||||
await faceapi.nets.faceExpressionNet.loadFromDisk('model');
|
||||
const options = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.1, maxResults: 10 }); // set model options
|
||||
const tensor = await readImage('demo/sample1.jpg');
|
||||
const t0 = performance.now();
|
||||
const result = await faceapi.detectAllFaces(tensor, options) // run detection
|
||||
.withFaceLandmarks()
|
||||
.withFaceExpressions()
|
||||
.withFaceDescriptors()
|
||||
.withAgeAndGender();
|
||||
return result;
|
||||
}
|
||||
|
||||
function print(face) {
|
||||
const expression = Object.entries(face.expressions).reduce((acc, val) => ((val[1] > acc[1]) ? val : acc), ['', 0]);
|
||||
const box = [face.alignedRect._box._x, face.alignedRect._box._y, face.alignedRect._box._width, face.alignedRect._box._height];
|
||||
const gender = `Gender: ${Math.round(100 * face.genderProbability)}% ${face.gender}`;
|
||||
log.data(`Detection confidence: ${Math.round(100 * face.detection._score)}% ${gender} Age: ${Math.round(10 * face.age) / 10} Expression: ${Math.round(100 * expression[1])}% ${expression[0]} Box: ${box.map((a) => Math.round(a))}`);
|
||||
}
|
||||
|
||||
async function main() {
|
||||
log.header();
|
||||
log.info('FaceAPI single-process test');
|
||||
|
||||
faceapi.env.monkeyPatch({ Canvas: canvas.Canvas, Image: canvas.Image, ImageData: canvas.ImageData });
|
||||
|
||||
await faceapi.tf.setBackend('wasm');
|
||||
await faceapi.tf.enableProdMode();
|
||||
await faceapi.tf.ENV.set('DEBUG', false);
|
||||
await faceapi.tf.ready();
|
||||
|
||||
log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf?.getBackend()}`);
|
||||
|
||||
log.info('Loading FaceAPI models');
|
||||
const modelPath = path.join(__dirname, modelPathRoot);
|
||||
await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
|
||||
await faceapi.nets.ageGenderNet.loadFromDisk(modelPath);
|
||||
await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
|
||||
await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
|
||||
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
|
||||
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults });
|
||||
|
||||
if (process.argv.length !== 3) {
|
||||
const t0 = process.hrtime.bigint();
|
||||
const dir = fs.readdirSync(imgPathRoot);
|
||||
let numImages = 0;
|
||||
for (const img of dir) {
|
||||
if (!img.toLocaleLowerCase().endsWith('.jpg')) continue;
|
||||
numImages += 1;
|
||||
const c = await image(path.join(imgPathRoot, img));
|
||||
const result = await detect(c);
|
||||
log.data('Image:', img, 'Detected faces:', result.length);
|
||||
for (const face of result) print(face);
|
||||
}
|
||||
const t1 = process.hrtime.bigint();
|
||||
log.info('Processed', numImages, 'images in', Math.trunc(parseInt(t1 - t0) / 1000 / 1000), 'ms');
|
||||
} else {
|
||||
const param = process.argv[2];
|
||||
if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {
|
||||
const c = await image(param);
|
||||
const result = await detect(c);
|
||||
log.data('Image:', param, 'Detected faces:', result.length);
|
||||
for (const face of result) print(face);
|
||||
}
|
||||
}
|
||||
tf.dispose(tensor); // dispose tensors to avoid memory leaks
|
||||
const t1 = performance.now();
|
||||
console.log('Time', t1 - t0); // eslint-disable-line no-console
|
||||
console.log('Result', result); // eslint-disable-line no-console
|
||||
}
|
||||
|
||||
main();
|
||||
|
|
31
demo/node.js
31
demo/node.js
|
@ -1,21 +1,25 @@
|
|||
// @ts-nocheck
|
||||
/**
|
||||
* FaceAPI Demo for NodeJS
|
||||
* - Uses external library [node-fetch](https://www.npmjs.com/package/node-fetch) to load images via http
|
||||
* - Loads image from provided param
|
||||
* - Outputs results to console
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const process = require('process');
|
||||
const path = require('path');
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
|
||||
const log = require('@vladmandic/pilogger');
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
|
||||
const fetch = require('node-fetch').default;
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
|
||||
const tf = require('@tensorflow/tfjs-node');
|
||||
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
|
||||
|
||||
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
|
||||
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
|
||||
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
|
||||
|
||||
const modelPathRoot = '../model';
|
||||
const imgPathRoot = './demo'; // modify to include your sample images
|
||||
const minConfidence = 0.15;
|
||||
const maxResults = 5;
|
||||
let optionsSSDMobileNet;
|
||||
let fetch; // dynamically imported later
|
||||
|
||||
async function image(input) {
|
||||
// read input image file and create tensor to be used for processing
|
||||
|
@ -63,7 +67,7 @@ async function detect(tensor) {
|
|||
}
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
|
||||
function detectPromise(tensor) {
|
||||
return new Promise((resolve) => faceapi
|
||||
.detectAllFaces(tensor, optionsSSDMobileNet)
|
||||
|
@ -89,12 +93,13 @@ async function main() {
|
|||
log.header();
|
||||
log.info('FaceAPI single-process test');
|
||||
|
||||
// eslint-disable-next-line node/no-extraneous-import
|
||||
fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-missing-import
|
||||
|
||||
await faceapi.tf.setBackend('tensorflow');
|
||||
await faceapi.tf.enableProdMode();
|
||||
await faceapi.tf.ENV.set('DEBUG', false);
|
||||
await faceapi.tf.ready();
|
||||
|
||||
log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf?.getBackend()}`);
|
||||
log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version} Backend: ${faceapi.tf?.getBackend()}`);
|
||||
|
||||
log.info('Loading FaceAPI models');
|
||||
const modelPath = path.join(__dirname, modelPathRoot);
|
||||
|
@ -105,7 +110,7 @@ async function main() {
|
|||
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
|
||||
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults });
|
||||
|
||||
if (process.argv.length !== 3) {
|
||||
if (process.argv.length !== 4) {
|
||||
const t0 = process.hrtime.bigint();
|
||||
const dir = fs.readdirSync(imgPathRoot);
|
||||
for (const img of dir) {
|
||||
|
@ -117,7 +122,7 @@ async function main() {
|
|||
tensor.dispose();
|
||||
}
|
||||
const t1 = process.hrtime.bigint();
|
||||
log.info('Processed', dir.length, 'images in', Math.trunc(parseInt(t1 - t0) / 1000 / 1000), 'ms');
|
||||
log.info('Processed', dir.length, 'images in', Math.trunc(Number((t1 - t0)) / 1000 / 1000), 'ms');
|
||||
} else {
|
||||
const param = process.argv[2];
|
||||
if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {
|
||||
|
|
|
@ -1,8 +1,14 @@
|
|||
import * as faceapi from '../dist/face-api.esm.js';
|
||||
/**
|
||||
* FaceAPI Demo for Browsers
|
||||
* Loaded via `webcam.html`
|
||||
*/
|
||||
|
||||
import * as faceapi from '../dist/face-api.esm.js'; // use when in dev mode
|
||||
// import * as faceapi from '@vladmandic/face-api'; // use when downloading face-api as npm
|
||||
|
||||
// configuration options
|
||||
const modelPath = '../model/'; // path to model folder that will be loaded using http
|
||||
// const modelPath = 'https://vladmandic.github.io/face-api/model/'; // path to model folder that will be loaded using http
|
||||
// const modelPath = 'https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model/'; // path to model folder that will be loaded using http
|
||||
const minScore = 0.2; // minimum score
|
||||
const maxResults = 5; // maximum number of results to return
|
||||
let optionsSSDMobileNet;
|
||||
|
@ -17,15 +23,14 @@ function str(json) {
|
|||
|
||||
// helper function to print strings to html document as a log
|
||||
function log(...txt) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(...txt);
|
||||
console.log(...txt); // eslint-disable-line no-console
|
||||
const div = document.getElementById('log');
|
||||
if (div) div.innerHTML += `<br>${txt}`;
|
||||
}
|
||||
|
||||
// helper function to draw detected faces
|
||||
function drawFaces(canvas, data, fps) {
|
||||
const ctx = canvas.getContext('2d');
|
||||
const ctx = canvas.getContext('2d', { willReadFrequently: true });
|
||||
if (!ctx) return;
|
||||
ctx.clearRect(0, 0, canvas.width, canvas.height);
|
||||
// draw title
|
||||
|
@ -42,18 +47,18 @@ function drawFaces(canvas, data, fps) {
|
|||
ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height);
|
||||
ctx.stroke();
|
||||
ctx.globalAlpha = 1;
|
||||
// const expression = person.expressions.sort((a, b) => Object.values(a)[0] - Object.values(b)[0]);
|
||||
// draw text labels
|
||||
const expression = Object.entries(person.expressions).sort((a, b) => b[1] - a[1]);
|
||||
ctx.fillStyle = 'black';
|
||||
ctx.fillText(`gender: ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 59);
|
||||
ctx.fillText(`expression: ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 41);
|
||||
ctx.fillText(`age: ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 23);
|
||||
ctx.fillText(`roll:${person.angle.roll.toFixed(3)} pitch:${person.angle.pitch.toFixed(3)} yaw:${person.angle.yaw.toFixed(3)}`, person.detection.box.x, person.detection.box.y - 5);
|
||||
ctx.fillText(`roll:${person.angle.roll}° pitch:${person.angle.pitch}° yaw:${person.angle.yaw}°`, person.detection.box.x, person.detection.box.y - 5);
|
||||
ctx.fillStyle = 'lightblue';
|
||||
ctx.fillText(`gender: ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 60);
|
||||
ctx.fillText(`expression: ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 42);
|
||||
ctx.fillText(`age: ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 24);
|
||||
ctx.fillText(`roll:${person.angle.roll.toFixed(3)} pitch:${person.angle.pitch.toFixed(3)} yaw:${person.angle.yaw.toFixed(3)}`, person.detection.box.x, person.detection.box.y - 6);
|
||||
ctx.fillText(`roll:${person.angle.roll}° pitch:${person.angle.pitch}° yaw:${person.angle.yaw}°`, person.detection.box.x, person.detection.box.y - 6);
|
||||
// draw face points for each face
|
||||
ctx.globalAlpha = 0.8;
|
||||
ctx.fillStyle = 'lightblue';
|
||||
|
@ -61,7 +66,6 @@ function drawFaces(canvas, data, fps) {
|
|||
for (let i = 0; i < person.landmarks.positions.length; i++) {
|
||||
ctx.beginPath();
|
||||
ctx.arc(person.landmarks.positions[i].x, person.landmarks.positions[i].y, pointSize, 0, 2 * Math.PI);
|
||||
// ctx.fillText(`${i}`, person.landmarks.positions[i].x + 4, person.landmarks.positions[i].y + 4);
|
||||
ctx.fill();
|
||||
}
|
||||
}
|
||||
|
@ -95,7 +99,6 @@ async function setupCamera() {
|
|||
const canvas = document.getElementById('canvas');
|
||||
if (!video || !canvas) return null;
|
||||
|
||||
let msg = '';
|
||||
log('Setting up camera');
|
||||
// setup webcam. note that navigator.mediaDevices requires that page is accessed via https
|
||||
if (!navigator.mediaDevices) {
|
||||
|
@ -103,23 +106,19 @@ async function setupCamera() {
|
|||
return null;
|
||||
}
|
||||
let stream;
|
||||
const constraints = {
|
||||
audio: false,
|
||||
video: { facingMode: 'user', resizeMode: 'crop-and-scale' },
|
||||
};
|
||||
const constraints = { audio: false, video: { facingMode: 'user', resizeMode: 'crop-and-scale' } };
|
||||
if (window.innerWidth > window.innerHeight) constraints.video.width = { ideal: window.innerWidth };
|
||||
else constraints.video.height = { ideal: window.innerHeight };
|
||||
try {
|
||||
stream = await navigator.mediaDevices.getUserMedia(constraints);
|
||||
} catch (err) {
|
||||
if (err.name === 'PermissionDeniedError' || err.name === 'NotAllowedError') msg = 'camera permission denied';
|
||||
else if (err.name === 'SourceUnavailableError') msg = 'camera not available';
|
||||
log(`Camera Error: ${msg}: ${err.message || err}`);
|
||||
if (err.name === 'PermissionDeniedError' || err.name === 'NotAllowedError') log(`Camera Error: camera permission denied: ${err.message || err}`);
|
||||
if (err.name === 'SourceUnavailableError') log(`Camera Error: camera not available: ${err.message || err}`);
|
||||
return null;
|
||||
}
|
||||
// @ts-ignore
|
||||
if (stream) video.srcObject = stream;
|
||||
else {
|
||||
if (stream) {
|
||||
video.srcObject = stream;
|
||||
} else {
|
||||
log('Camera Error: stream empty');
|
||||
return null;
|
||||
}
|
||||
|
@ -128,31 +127,23 @@ async function setupCamera() {
|
|||
if (settings.deviceId) delete settings.deviceId;
|
||||
if (settings.groupId) delete settings.groupId;
|
||||
if (settings.aspectRatio) settings.aspectRatio = Math.trunc(100 * settings.aspectRatio) / 100;
|
||||
log(`Camera active: ${track.label}`); // ${str(constraints)}
|
||||
log(`Camera active: ${track.label}`);
|
||||
log(`Camera settings: ${str(settings)}`);
|
||||
canvas.addEventListener('click', () => {
|
||||
// @ts-ignore
|
||||
if (video && video.readyState >= 2) {
|
||||
// @ts-ignore
|
||||
if (video.paused) {
|
||||
// @ts-ignore
|
||||
video.play();
|
||||
detectVideo(video, canvas);
|
||||
} else {
|
||||
// @ts-ignore
|
||||
video.pause();
|
||||
}
|
||||
}
|
||||
// @ts-ignore
|
||||
log(`Camera state: ${video.paused ? 'paused' : 'playing'}`);
|
||||
});
|
||||
return new Promise((resolve) => {
|
||||
video.onloadeddata = async () => {
|
||||
// @ts-ignore
|
||||
canvas.width = video.videoWidth;
|
||||
// @ts-ignore
|
||||
canvas.height = video.videoHeight;
|
||||
// @ts-ignore
|
||||
video.play();
|
||||
detectVideo(video, canvas);
|
||||
resolve(true);
|
||||
|
@ -170,7 +161,6 @@ async function setupFaceAPI() {
|
|||
await faceapi.nets.faceRecognitionNet.load(modelPath);
|
||||
await faceapi.nets.faceExpressionNet.load(modelPath);
|
||||
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: minScore, maxResults });
|
||||
|
||||
// check tf engine state
|
||||
log(`Models loaded: ${str(faceapi.tf.engine().state.numTensors)} tensors`);
|
||||
}
|
||||
|
@ -180,19 +170,21 @@ async function main() {
|
|||
log('FaceAPI WebCam Test');
|
||||
|
||||
// if you want to use wasm backend location for wasm binaries must be specified
|
||||
// await faceapi.tf.setWasmPaths('../node_modules/@tensorflow/tfjs-backend-wasm/dist/');
|
||||
// await faceapi.tf.setBackend('wasm');
|
||||
// await faceapi.tf?.setWasmPaths(`https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${faceapi.tf.version_core}/dist/`);
|
||||
// await faceapi.tf?.setBackend('wasm');
|
||||
// log(`WASM SIMD: ${await faceapi.tf?.env().getAsync('WASM_HAS_SIMD_SUPPORT')} Threads: ${await faceapi.tf?.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT') ? 'Multi' : 'Single'}`);
|
||||
|
||||
// default is webgl backend
|
||||
await faceapi.tf.setBackend('webgl');
|
||||
|
||||
await faceapi.tf.enableProdMode();
|
||||
await faceapi.tf.ENV.set('DEBUG', false);
|
||||
await faceapi.tf.ready();
|
||||
|
||||
// tfjs optimizations
|
||||
if (faceapi.tf?.env().flagRegistry.CANVAS2D_WILL_READ_FREQUENTLY) faceapi.tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
|
||||
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
|
||||
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
|
||||
|
||||
// check version
|
||||
log(`Version: FaceAPI ${str(faceapi?.version.faceapi || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`);
|
||||
// log(`Flags: ${JSON.stringify(faceapi?.tf?.ENV.flags || { tf: 'not loaded' })}`);
|
||||
log(`Version: FaceAPI ${str(faceapi?.version || '(not loaded)')} TensorFlow/JS ${str(faceapi.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi.tf?.getBackend() || '(not loaded)')}`);
|
||||
|
||||
await setupFaceAPI();
|
||||
await setupCamera();
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
export * from '../types/face-api';
|
|
@ -0,0 +1 @@
|
|||
export * from '../types/face-api';
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -0,0 +1 @@
|
|||
export * from '../types/face-api';
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
|
@ -0,0 +1 @@
|
|||
export * from '../types/face-api';
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -0,0 +1 @@
|
|||
export * from '../types/face-api';
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1 @@
|
|||
export * from '../types/face-api';
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,28 @@
|
|||
/*
|
||||
import '@tensorflow/tfjs-core';
|
||||
import '@tensorflow/tfjs-core/dist/types';
|
||||
import '@tensorflow/tfjs-core/dist/register_all_gradients';
|
||||
import '@tensorflow/tfjs-core/dist/public/chained_ops/register_all_chained_ops';
|
||||
import '@tensorflow/tfjs-data';
|
||||
import '@tensorflow/tfjs-layers';
|
||||
import '@tensorflow/tfjs-converter';
|
||||
import '@tensorflow/tfjs-backend-cpu';
|
||||
import '@tensorflow/tfjs-backend-webgl';
|
||||
import '@tensorflow/tfjs-backend-wasm';
|
||||
import '@tensorflow/tfjs-backend-webgpu';
|
||||
*/
|
||||
|
||||
export declare const version: {
|
||||
'tfjs-core': string;
|
||||
'tfjs-backend-cpu': string;
|
||||
'tfjs-backend-webgl': string;
|
||||
'tfjs-data': string;
|
||||
'tfjs-layers': string;
|
||||
'tfjs-converter': string;
|
||||
tfjs: string;
|
||||
};
|
||||
|
||||
export { io, browser, image } from '@tensorflow/tfjs-core';
|
||||
export { tensor, tidy, softmax, unstack, relu, add, conv2d, cast, zeros, concat, avgPool, stack, fill, transpose, tensor1d, tensor2d, tensor3d, tensor4d, maxPool, matMul, mul, sub, scalar } from '@tensorflow/tfjs-core';
|
||||
export { div, pad, slice, reshape, slice3d, expandDims, depthwiseConv2d, separableConv2d, sigmoid, exp, tile, batchNorm, clipByValue } from '@tensorflow/tfjs-core';
|
||||
export { ENV, Variable, Tensor, TensorLike, Rank, Tensor1D, Tensor2D, Tensor3D, Tensor4D, Tensor5D, NamedTensorMap } from '@tensorflow/tfjs-core';
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,9 @@
|
|||
export declare const version: {
|
||||
'tfjs-core': string;
|
||||
'tfjs-backend-cpu': string;
|
||||
'tfjs-backend-webgl': string;
|
||||
'tfjs-data': string;
|
||||
'tfjs-layers': string;
|
||||
'tfjs-converter': string;
|
||||
tfjs: string;
|
||||
};
|
|
@ -0,0 +1,7 @@
|
|||
/*
|
||||
Face-API
|
||||
homepage: <https://github.com/vladmandic/face-api>
|
||||
author: <https://github.com/vladmandic>'
|
||||
*/
|
||||
|
||||
var e="4.22.0";var s="4.22.0";var t="4.22.0";var n="4.22.0";var i="4.22.0";var w={tfjs:e,"tfjs-core":e,"tfjs-converter":s,"tfjs-backend-cpu":t,"tfjs-backend-webgl":n,"tfjs-backend-wasm":i};export{w as version};
|
72
package.json
72
package.json
|
@ -1,12 +1,12 @@
|
|||
{
|
||||
"name": "@vladmandic/face-api",
|
||||
"version": "1.2.2",
|
||||
"version": "1.7.15",
|
||||
"description": "FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS",
|
||||
"sideEffects": false,
|
||||
"main": "dist/face-api.node.js",
|
||||
"module": "dist/face-api.esm.js",
|
||||
"browser": "dist/face-api.esm.js",
|
||||
"types": "types/index.d.ts",
|
||||
"types": "types/face-api.d.ts",
|
||||
"author": "Vladimir Mandic <mandic00@live.com>",
|
||||
"bugs": {
|
||||
"url": "https://github.com/vladmandic/face-api/issues"
|
||||
|
@ -14,18 +14,19 @@
|
|||
"homepage": "https://vladmandic.github.io/face-api/demo/webcam.html",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
"node": ">=14.0.0"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/vladmandic/face-api.git"
|
||||
},
|
||||
"scripts": {
|
||||
"start": "node --trace-warnings demo/node.js",
|
||||
"dev": "node --trace-warnings server/serve.js",
|
||||
"build": "rimraf dist/* types/* typedoc/* && node server/build.js",
|
||||
"lint": "eslint src/**/* demo/*.js server/*.js",
|
||||
"test": "node --trace-warnings test/test-node.js"
|
||||
"start": "node --no-warnings demo/node.js",
|
||||
"build": "node build.js",
|
||||
"dev": "build --profile development",
|
||||
"lint": "eslint src/ demo/",
|
||||
"test": "node --trace-warnings test/test-node.js",
|
||||
"scan": "npx auditjs@latest ossi --dev --quiet"
|
||||
},
|
||||
"keywords": [
|
||||
"face-api",
|
||||
|
@ -41,31 +42,38 @@
|
|||
"tfjs"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@canvas/image": "^1.0.1",
|
||||
"@tensorflow/tfjs": "^3.6.0",
|
||||
"@tensorflow/tfjs-backend-wasm": "^3.6.0",
|
||||
"@tensorflow/tfjs-node": "^3.6.1",
|
||||
"@tensorflow/tfjs-node-gpu": "^3.6.1",
|
||||
"@types/node": "^15.0.1",
|
||||
"@typescript-eslint/eslint-plugin": "^4.22.0",
|
||||
"@typescript-eslint/parser": "^4.22.0",
|
||||
"@vladmandic/pilogger": "^0.2.17",
|
||||
"canvas": "^2.7.0",
|
||||
"chokidar": "^3.5.1",
|
||||
"dayjs": "^1.10.4",
|
||||
"esbuild": "^0.11.16",
|
||||
"eslint": "^7.25.0",
|
||||
"eslint-config-airbnb-base": "^14.2.1",
|
||||
"eslint-plugin-import": "^2.22.1",
|
||||
"eslint-plugin-json": "^2.1.2",
|
||||
"@canvas/image": "^2.0.0",
|
||||
"@microsoft/api-extractor": "^7.49.2",
|
||||
"@tensorflow/tfjs": "^4.22.0",
|
||||
"@tensorflow/tfjs-backend-cpu": "^4.22.0",
|
||||
"@tensorflow/tfjs-backend-wasm": "^4.22.0",
|
||||
"@tensorflow/tfjs-backend-webgl": "^4.22.0",
|
||||
"@tensorflow/tfjs-backend-webgpu": "4.22.0",
|
||||
"@tensorflow/tfjs-converter": "^4.22.0",
|
||||
"@tensorflow/tfjs-core": "^4.22.0",
|
||||
"@tensorflow/tfjs-data": "^4.22.0",
|
||||
"@tensorflow/tfjs-layers": "^4.22.0",
|
||||
"@tensorflow/tfjs-node": "^4.22.0",
|
||||
"@tensorflow/tfjs-node-gpu": "^4.22.0",
|
||||
"@types/node": "^22.13.1",
|
||||
"@types/offscreencanvas": "^2019.7.3",
|
||||
"@typescript-eslint/eslint-plugin": "^8.5.0",
|
||||
"@typescript-eslint/parser": "^8.5.0",
|
||||
"@vladmandic/build": "^0.10.2",
|
||||
"@vladmandic/pilogger": "^0.5.1",
|
||||
"ajv": "^8.17.1",
|
||||
"esbuild": "^0.24.2",
|
||||
"eslint": "8.57.0",
|
||||
"eslint-config-airbnb-base": "^15.0.0",
|
||||
"eslint-plugin-import": "^2.30.0",
|
||||
"eslint-plugin-json": "^4.0.1",
|
||||
"eslint-plugin-node": "^11.1.0",
|
||||
"eslint-plugin-promise": "^5.1.0",
|
||||
"node-fetch": "^2.6.1",
|
||||
"rimraf": "^3.0.2",
|
||||
"eslint-plugin-promise": "^7.1.0",
|
||||
"node-fetch": "^3.3.2",
|
||||
"rimraf": "^6.0.1",
|
||||
"seedrandom": "^3.0.5",
|
||||
"simple-git": "^2.38.0",
|
||||
"tslib": "^2.2.0",
|
||||
"typedoc": "^0.20.36",
|
||||
"typescript": "^4.2.4"
|
||||
"tslib": "^2.8.1",
|
||||
"typedoc": "^0.27.6",
|
||||
"typescript": "5.7.3"
|
||||
}
|
||||
}
|
||||
|
|
237
server/build.js
237
server/build.js
|
@ -1,237 +0,0 @@
|
|||
/* eslint-disable import/no-extraneous-dependencies */
|
||||
/* eslint-disable node/no-unpublished-require */
|
||||
|
||||
const esbuild = require('esbuild');
|
||||
const ts = require('typescript');
|
||||
const log = require('@vladmandic/pilogger');
|
||||
const TypeDoc = require('typedoc');
|
||||
const changelog = require('./changelog');
|
||||
|
||||
const banner = { js: `
|
||||
/*
|
||||
Face-API
|
||||
homepage: <https://github.com/vladmandic/face-api>
|
||||
author: <https://github.com/vladmandic>'
|
||||
*/
|
||||
` };
|
||||
|
||||
let td = null;
|
||||
|
||||
// tsc configuration
|
||||
const tsconfig = {
|
||||
noEmitOnError: false,
|
||||
target: ts.ScriptTarget.ES2018,
|
||||
module: ts.ModuleKind.ES2020,
|
||||
// outFile: "dist/face-api.d.ts",
|
||||
outDir: 'types/',
|
||||
declaration: true,
|
||||
emitDeclarationOnly: true,
|
||||
emitDecoratorMetadata: true,
|
||||
experimentalDecorators: true,
|
||||
skipLibCheck: true,
|
||||
strictNullChecks: true,
|
||||
baseUrl: './',
|
||||
paths: {
|
||||
tslib: ['node_modules/tslib/tslib.d.ts'],
|
||||
},
|
||||
};
|
||||
|
||||
// common configuration
|
||||
const common = {
|
||||
banner,
|
||||
minifyWhitespace: false,
|
||||
minifyIdentifiers: false,
|
||||
minifySyntax: false,
|
||||
bundle: true,
|
||||
sourcemap: true,
|
||||
metafile: true,
|
||||
logLevel: 'error',
|
||||
target: 'es2018',
|
||||
// tsconfig: './tsconfig.json',
|
||||
};
|
||||
|
||||
const targets = {
|
||||
node: {
|
||||
tfjs: {
|
||||
platform: 'node',
|
||||
format: 'cjs',
|
||||
entryPoints: ['src/tfjs/tf-node.ts'],
|
||||
outfile: 'dist/tfjs.esm.js',
|
||||
external: ['@tensorflow'],
|
||||
},
|
||||
node: {
|
||||
platform: 'node',
|
||||
format: 'cjs',
|
||||
entryPoints: ['src/index.ts'],
|
||||
outfile: 'dist/face-api.node.js',
|
||||
external: ['@tensorflow'],
|
||||
},
|
||||
},
|
||||
nodeGPU: {
|
||||
tfjs: {
|
||||
platform: 'node',
|
||||
format: 'cjs',
|
||||
entryPoints: ['src/tfjs/tf-node-gpu.ts'],
|
||||
outfile: 'dist/tfjs.esm.js',
|
||||
external: ['@tensorflow'],
|
||||
},
|
||||
node: {
|
||||
platform: 'node',
|
||||
format: 'cjs',
|
||||
entryPoints: ['src/index.ts'],
|
||||
outfile: 'dist/face-api.node-gpu.js',
|
||||
external: ['@tensorflow'],
|
||||
},
|
||||
},
|
||||
nodeCPU: {
|
||||
tfjs: {
|
||||
platform: 'node',
|
||||
format: 'cjs',
|
||||
entryPoints: ['src/tfjs/tf-node-cpu.ts'],
|
||||
outfile: 'dist/tfjs.esm.js',
|
||||
external: ['@tensorflow'],
|
||||
},
|
||||
node: {
|
||||
platform: 'node',
|
||||
format: 'cjs',
|
||||
entryPoints: ['src/index.ts'],
|
||||
outfile: 'dist/face-api.node-cpu.js',
|
||||
external: ['@tensorflow'],
|
||||
},
|
||||
},
|
||||
browserNoBundle: {
|
||||
tfjs: {
|
||||
platform: 'browser',
|
||||
format: 'esm',
|
||||
entryPoints: ['src/tfjs/tf-browser.ts'],
|
||||
outfile: 'dist/tfjs.esm.js',
|
||||
external: ['fs', 'buffer', 'util', 'os', '@tensorflow'],
|
||||
},
|
||||
esm: {
|
||||
platform: 'browser',
|
||||
format: 'esm',
|
||||
entryPoints: ['src/index.ts'],
|
||||
outfile: 'dist/face-api.esm-nobundle.js',
|
||||
external: ['fs', 'buffer', 'util', 'os', '@tensorflow', 'tfjs.esm.js'],
|
||||
},
|
||||
},
|
||||
browserBundle: {
|
||||
tfjs: {
|
||||
platform: 'browser',
|
||||
format: 'esm',
|
||||
entryPoints: ['src/tfjs/tf-browser.ts'],
|
||||
outfile: 'dist/tfjs.esm.js',
|
||||
external: ['fs', 'buffer', 'util', 'os'],
|
||||
},
|
||||
iife: {
|
||||
platform: 'browser',
|
||||
format: 'iife',
|
||||
globalName: 'faceapi',
|
||||
entryPoints: ['src/index.ts'],
|
||||
outfile: 'dist/face-api.js',
|
||||
external: ['fs', 'buffer', 'util', 'os'],
|
||||
},
|
||||
esm: {
|
||||
platform: 'browser',
|
||||
format: 'esm',
|
||||
entryPoints: ['src/index.ts'],
|
||||
outfile: 'dist/face-api.esm.js',
|
||||
external: ['fs', 'buffer', 'util', 'os'],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
async function getStats(json) {
|
||||
const stats = {};
|
||||
if (json && json.metafile.inputs && json.metafile.outputs) {
|
||||
for (const [key, val] of Object.entries(json.metafile.inputs)) {
|
||||
if (key.startsWith('node_modules')) {
|
||||
stats.modules = (stats.modules || 0) + 1;
|
||||
stats.moduleBytes = (stats.moduleBytes || 0) + val.bytes;
|
||||
} else {
|
||||
stats.imports = (stats.imports || 0) + 1;
|
||||
stats.importBytes = (stats.importBytes || 0) + val.bytes;
|
||||
}
|
||||
}
|
||||
const files = [];
|
||||
for (const [key, val] of Object.entries(json.metafile.outputs)) {
|
||||
if (!key.endsWith('.map')) {
|
||||
files.push(key);
|
||||
stats.outputBytes = (stats.outputBytes || 0) + val.bytes;
|
||||
}
|
||||
}
|
||||
stats.outputFiles = files.join(', ');
|
||||
}
|
||||
return stats;
|
||||
}
|
||||
|
||||
function compile(fileNames, options) {
|
||||
log.info('Compile typings:', fileNames);
|
||||
const program = ts.createProgram(fileNames, options);
|
||||
const emit = program.emit();
|
||||
const diag = ts
|
||||
.getPreEmitDiagnostics(program)
|
||||
.concat(emit.diagnostics);
|
||||
for (const info of diag) {
|
||||
// @ts-ignore
|
||||
const msg = info.messageText.messageText || info.messageText;
|
||||
if (msg.includes('package.json')) continue;
|
||||
if (msg.includes('Expected 0 arguments, but got 1')) continue;
|
||||
if (info.file) {
|
||||
const pos = info.file.getLineAndCharacterOfPosition(info.start || 0);
|
||||
log.error(`TSC: ${info.file.fileName} [${pos.line + 1},${pos.character + 1}]:`, msg);
|
||||
} else {
|
||||
log.error('TSC:', msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function typedoc(entryPoint) {
|
||||
log.info('Generate TypeDocs:', entryPoint);
|
||||
if (!td) {
|
||||
td = new TypeDoc.Application();
|
||||
td.options.addReader(new TypeDoc.TSConfigReader());
|
||||
td.bootstrap({ entryPoints: entryPoint });
|
||||
}
|
||||
const project = td.convert();
|
||||
const result = project ? await td.generateDocs(project, 'typedoc') : null;
|
||||
if (result) log.warn('TypeDoc:', result);
|
||||
}
|
||||
|
||||
// rebuild on file change
|
||||
async function build(f, msg, dev = false) {
|
||||
log.info('Build: file', msg, f, 'target:', common.target);
|
||||
try {
|
||||
// rebuild all target groups and types
|
||||
for (const [targetGroupName, targetGroup] of Object.entries(targets)) {
|
||||
for (const [targetName, targetOptions] of Object.entries(targetGroup)) {
|
||||
// if triggered from watch mode, rebuild only browser bundle
|
||||
// if ((require.main !== module) && (targetGroupName !== 'browserBundle')) continue;
|
||||
// @ts-ignore
|
||||
const meta = await esbuild.build({ ...common, ...targetOptions });
|
||||
const stats = await getStats(meta);
|
||||
log.state(`Build for: ${targetGroupName} type: ${targetName}:`, stats);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// catch errors and print where it occured
|
||||
log.error('Build error', JSON.stringify(err.errors || err, null, 2));
|
||||
if (require.main === module) process.exit(1);
|
||||
}
|
||||
|
||||
if (!dev) {
|
||||
// generate typings & typedoc only when run as explict build
|
||||
await compile(targets.browserBundle.esm.entryPoints, tsconfig);
|
||||
await changelog.update('../CHANGELOG.md');
|
||||
await typedoc(targets.browserBundle.esm.entryPoints);
|
||||
}
|
||||
|
||||
if (require.main === module) process.exit(0);
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
log.header();
|
||||
build('all', 'startup');
|
||||
} else {
|
||||
exports.build = build;
|
||||
}
|
|
@ -1,58 +0,0 @@
|
|||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
|
||||
const dayjs = require('dayjs');
|
||||
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
|
||||
const simpleGit = require('simple-git/promise');
|
||||
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
|
||||
const logger = require('@vladmandic/pilogger');
|
||||
const app = require('../package.json');
|
||||
|
||||
const git = simpleGit();
|
||||
|
||||
let text = `# ${app.name}
|
||||
|
||||
Version: **${app.version}**
|
||||
Description: **${app.description}**
|
||||
|
||||
Author: **${app.author}**
|
||||
License: **${app.license}** </LICENSE>
|
||||
Repository: **<${app.repository.url}>**
|
||||
|
||||
## Changelog
|
||||
`;
|
||||
|
||||
async function update(f) {
|
||||
const gitLog = await git.log();
|
||||
// @ts-ignore
|
||||
const log = gitLog.all.sort((a, b) => (new Date(b.date).getTime() - new Date(a.date).getTime()));
|
||||
|
||||
let previous = '';
|
||||
const headings = [];
|
||||
for (const l of log) {
|
||||
const msg = l.message.toLowerCase();
|
||||
if ((l.refs !== '') || msg.match(/^[0-99].[0-99].[0-99]/)) {
|
||||
const dt = dayjs(l.date).format('YYYY/MM/DD');
|
||||
let ver = msg.match(/[0-99].[0-99].[0-99]/) ? msg : l.refs;
|
||||
ver = ver.replace('tag: v', '').replace('tag: ', 'release: ').split(',')[0];
|
||||
const heading = `\n### **${ver}** ${dt} ${l.author_email}\n\n`;
|
||||
if (!headings.includes(heading) && !ver.startsWith('tag')) {
|
||||
headings.push(heading);
|
||||
text += heading;
|
||||
}
|
||||
} else if ((msg.length > 2) && !msg.startsWith('update') && (previous !== msg)) {
|
||||
previous = msg;
|
||||
text += `- ${msg}\n`;
|
||||
}
|
||||
}
|
||||
|
||||
const name = path.join(__dirname, f);
|
||||
fs.writeFileSync(name, text);
|
||||
logger.info('Update Change log:', [name]);
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
update('../CHANGELOG.md');
|
||||
} else {
|
||||
exports.update = update;
|
||||
}
|
|
@ -1,31 +0,0 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIIFazCCA1OgAwIBAgIUKQKodDBJnuweJs5IcTyL4NIp3vgwDQYJKoZIhvcNAQEL
|
||||
BQAwRTELMAkGA1UEBhMCVVMxEDAOBgNVBAgMB0Zsb3JpZGExDjAMBgNVBAcMBU1p
|
||||
YW1pMRQwEgYDVQQKDAtAdmxhZG1hbmRpYzAeFw0yMDExMDcxNTE3NDNaFw0yMTEx
|
||||
MDcxNTE3NDNaMEUxCzAJBgNVBAYTAlVTMRAwDgYDVQQIDAdGbG9yaWRhMQ4wDAYD
|
||||
VQQHDAVNaWFtaTEUMBIGA1UECgwLQHZsYWRtYW5kaWMwggIiMA0GCSqGSIb3DQEB
|
||||
AQUAA4ICDwAwggIKAoICAQDSC88PF8NyLkagK5mAZ/d739SOU16l2Cx3zE35zZQh
|
||||
O29+1L4L+oMksLYipo+FMgtGO+MSzFsvGgKCs2sDSdfyoNSTZ3QaN4BAZ0sbq+wL
|
||||
cke7yRBTM/XIGOQfhqq8yC2q8/zXwUbZg0UsCAxDGNwUr0Qlm829laIU/UN1KcYS
|
||||
57Nebl1z05wMEvYmyl4JBAl9ozne7KS9DyW7jbrAXE8TaEy3+pY66kx5GG6v2+up
|
||||
ScITGm4YPmPPlpOF1UjQloosgxdVa+fVp8aNCa/rf0JNO0Uhb3OKOZ+4kYmpfPn/
|
||||
trwoKWAa6CV1uAJ+3zDkLMq1JNlrV4OMp1QvX0wzA47a/n466JMN9SFb0Ng5wf19
|
||||
VOtT5Zu7chDStBudVjxlMDfUixvhvn4sjbaLNYR1fyWPoNXwr0KX2lpTP1QOzp9/
|
||||
Sd0iiJ8RPfXn8Xo26MStu4I52CZjS7yEMgJGCLH/mgPuSbrHHYYrrrCPJgmQOZG2
|
||||
TNMI+EqOwQvHh2ghdv7t7EEk4IslBk0QzufMXQ2WFXQ20nvj74mrmmiMuBcmonpR
|
||||
0egA5/M18ZPLQxYu0Q86NUr4XHtAG1fq+n8pseQ7Avy6Gk6HRiezCbB7TJ9rnNeu
|
||||
jie1TDajC6W7rx0VF7hcxkIrDgNgnYcjXUV2hMx1lo4fIoWkL3nJJVEthMVIcJOX
|
||||
EwIDAQABo1MwUTAdBgNVHQ4EFgQUHawIRAo1bW8Xy7l4oKfM+ESjhs0wHwYDVR0j
|
||||
BBgwFoAUHawIRAo1bW8Xy7l4oKfM+ESjhs0wDwYDVR0TAQH/BAUwAwEB/zANBgkq
|
||||
hkiG9w0BAQsFAAOCAgEAozQJk5Ahx7rDn/aMXLdZFxR81VfkmHDm7NhlJsdVKUx5
|
||||
o/iegXnvwc1PoeKsz2S504QiuL8l7jqZoU2WPIm7Vlr+oxBgiKqjo1EqBsUgNCZ7
|
||||
qxMD84TVp/KBGjKUh1TXhjJwGGfNNr+R/fJGw+36UeuY3fSckjaYTuNuVElp+DoZ
|
||||
/pGyu1qpcybLfiR8mpQkCeU/iBq5gIjWddbVjlYoTKfqULZrpsAF2AeqELEgyshl
|
||||
p3PNhW/54TJSn4mWK+39BibYHPkvx8orEuWKyjjRk82hEXi7J3hsGKX29qC3oO40
|
||||
67DKDWmZdMCz+E1ERf10V0bSp6iJnnlwknHJloZUETV1NY/DdoSC6e8CN0+0cQqL
|
||||
aJefJ483O3sXyN3v3+DaEFBLPFgRFGZB7eaBwR2xAv/KfjT5dSyi+wA4LZAxsQMC
|
||||
Q7UYGNAfHLNHJo/bsj12+JDhJaFZ/KoBKzyMUuEXmvjxXNDMCfm+gVQFoLyXkGq3
|
||||
491W/O7LjR6pkD+ce0qeTFMu3nfUubyfbONVDEfuH4GC1e+FAggCRaBnFsVzCzXj
|
||||
jxOOLoQ9nwLk8v17mx0BSwX4iuqvXFntfJbzfcnzQfx/qqPFheIbGnmKw1lrRML8
|
||||
87ZbN6t01+v2YyYe6Mc7p80s1R3jc8aVX8ca2KcYwsJAkg/xz0q5RJwsE1is5UY=
|
||||
-----END CERTIFICATE-----
|
|
@ -1,52 +0,0 @@
|
|||
-----BEGIN PRIVATE KEY-----
|
||||
MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQDSC88PF8NyLkag
|
||||
K5mAZ/d739SOU16l2Cx3zE35zZQhO29+1L4L+oMksLYipo+FMgtGO+MSzFsvGgKC
|
||||
s2sDSdfyoNSTZ3QaN4BAZ0sbq+wLcke7yRBTM/XIGOQfhqq8yC2q8/zXwUbZg0Us
|
||||
CAxDGNwUr0Qlm829laIU/UN1KcYS57Nebl1z05wMEvYmyl4JBAl9ozne7KS9DyW7
|
||||
jbrAXE8TaEy3+pY66kx5GG6v2+upScITGm4YPmPPlpOF1UjQloosgxdVa+fVp8aN
|
||||
Ca/rf0JNO0Uhb3OKOZ+4kYmpfPn/trwoKWAa6CV1uAJ+3zDkLMq1JNlrV4OMp1Qv
|
||||
X0wzA47a/n466JMN9SFb0Ng5wf19VOtT5Zu7chDStBudVjxlMDfUixvhvn4sjbaL
|
||||
NYR1fyWPoNXwr0KX2lpTP1QOzp9/Sd0iiJ8RPfXn8Xo26MStu4I52CZjS7yEMgJG
|
||||
CLH/mgPuSbrHHYYrrrCPJgmQOZG2TNMI+EqOwQvHh2ghdv7t7EEk4IslBk0QzufM
|
||||
XQ2WFXQ20nvj74mrmmiMuBcmonpR0egA5/M18ZPLQxYu0Q86NUr4XHtAG1fq+n8p
|
||||
seQ7Avy6Gk6HRiezCbB7TJ9rnNeujie1TDajC6W7rx0VF7hcxkIrDgNgnYcjXUV2
|
||||
hMx1lo4fIoWkL3nJJVEthMVIcJOXEwIDAQABAoICAF45S+ZSW6uh1K7PQCnY+a0J
|
||||
CJncDk5JPhFzhds0fGm39tknaCWJeEECQIIkw6cVfvc/sCpjn9fuTAgDolK0UnoV
|
||||
6aZCN1P3Z8H8VDYSlm3AEyvLE1avrWbYu6TkzTyoc8wHbXn/yt+SQnpxFccXpMpm
|
||||
oSRZ0x5jvHS79AHf/mnGpLEMw0FNQOgtrVxTVYGn3PYOPcyhzXi+Dcgn2QmnnxVu
|
||||
qVOyxqehKTL9YdHjzsB/RN868P5RJocd3gmgVuyzS0KSf+oi4Ln4bFoiaVc0HDL3
|
||||
DpjkHSl5lgu+xclRNfifKaK+hM0tLHi1VfFB//WrnjdKU3oSpQF4oowprM4Jn5AP
|
||||
jhRI54JWZlWnvbiAOx7D49xFga3EnqjVH6So2gxi+q3Dv25luXGAnueaBPDpVC6c
|
||||
nkJm2aCl7T3xlVpW8O5Fs+rsP8Xr9RTyEQJauM01uOi3N2zEeO8ERxTYEW5Sy2U7
|
||||
OFKRXtLj7Jnejib/SxWGcIX4Wid5QFAygbXz4APfFN22QU0fqmhm4/c2OB/xM8qr
|
||||
VVFx4xlG2wnuq5CZdZjmK3MTbmSM+pWW8mly/+++p694cf5oXGenYus/JWFNwxj/
|
||||
fPyA7zQmaTOidu6clDHzkPCOE7TBv9TkQ7lL6ClgE7B39JR65ZQtjCYqRsADKsGI
|
||||
dFMg+HDmGbVEfWg2V0GBAoIBAQDupImrJ0JXHA/0SEC2Tbz7pE60fRwmBFdhvk4Z
|
||||
rzZiaOl+M2HXQU6b5DYhKcgdiFah5IuAnsRPo6X5Ug+Q1DV3OFTuEGAkXgqZliNa
|
||||
aXsJcc0++DYlXX3BrTb66gylVLQRs5tZzsXps5iXWclziDC2go8RKnCwxsxwbzVq
|
||||
FP4hoBP4dp83WoLF4NznnGFGw3/KLlMivtRxDE5OegpxTuWGlA/bVtT187Ksuuz3
|
||||
dFUayLfpg0ABS/E7wwAJjSUpPPEi3J/G255H3lZXgS1gWcAf3rGDQYlJKF8UHdja
|
||||
yWQcAOF+b/bYEpa4lHw+UtKNNkPTiCV4Y7CNQd8a2Gcl7VFTAoIBAQDhUs9r1dhm
|
||||
rUlNAunVZZZVZ91XhXeqVTa/9xUDEvDh91nB5c7CcuNXxwcX4oTsMF4Bc7CHlvOv
|
||||
pybp+QLjK310VjxxkFYJT0TKWuYqLjtNkQ93sp8wF3gVCf8m8bMOX/gPfQzNZWKp
|
||||
un+ZWnzXNU5d2A+63xbZmFzT0Zo6H/h9YEO5Xxw32HCKFzEhl5JD34muZTEXSpdD
|
||||
p7LUUr5LvnoUqEzonhXx2qRnTLP87d1o0GlkVex9HeeeBgrvm57QYoJnABxw9UFM
|
||||
/ocLeYsjkmqJQRBDWgiwQlos1pdZyX2Yj20b7Wm5Pxd4aM9gh5EZZMXeQHhbHlWz
|
||||
UY1IPxfAkytBAoIBAHmYavFDisD58oMlAZwiViXeXaAHk30nfyK1pfPeXBaeoEKG
|
||||
idb1VsmF6bLSKD4sBwBshExgGWT+3IYCMx43kpqRoGzA+UvugvYpExBxaJiyXMM2
|
||||
E9jMH1S9HqOQ+CqR00KlwoVrH1rqANk1jbkJbtDAC4fSmSLp2Kd9crj/w1F80FAs
|
||||
mQnKW5HZ9pUpEEPPP2DUY9XzaCnF/GxuML31VmxRKxc20kIUDzmF8VJQ+0Avf85C
|
||||
6yz99gfeXzl+qq2teKyrv9nCc47pEhN6JZXPhV53yPk5PmuBX5jPcHxiW1kNddhH
|
||||
0n3cUuHv/rJ+3vvG555z46vJF9+R7c0u8LfZiTMCggEBAMQd4a/IN0xXM1+2U3SL
|
||||
sSew+XR+FMPK25aGJmHAkKz9L8CWlzmj6cCy2LevT2aMSqYU3eeGOZ//at1nAV5c
|
||||
shsaHA30RQ5hUkyWhZLdHnzK752NeQTQyJH3W3+4C9NNMIm6m/QCdLeqPflqSxK9
|
||||
sPH5ZueN2UOXW+R5oTVKMmxd51RnNhZdasamnPrSBFrTK/EA3pOZNsOKKRqo0jz3
|
||||
Eyb7vcUSI6OYXFQU7OwO1RGvpKvSJb5Y0wo11DrtRnO16i5gaGDg9u9e8ofISJSz
|
||||
kcrZOKCGst1HQ1mXhbB+sbSh0aPnJog4I+OHxkgMdvyVO6vQjXExnAIxzzi8wZ25
|
||||
+oECggEBAIT6q/sn8xFt5Jwc/0Z7YUjd415Nknam09tnbB+UPRR6lt6JFoILx8by
|
||||
5Y1sN30HWDv27v9G32oZhUDii3Rt3PkbYLqlHy7XBMEXA9WIUo+3Be7mtdL8Wfrj
|
||||
0zn0b7Hks9a9KsElG1dXUopwjMRL3M22UamaN7e/gl5jz2I7pyc5oaqz9GRDV5yG
|
||||
slb6gGZ5naMycJD3p8vutXbmgKRr9beRp55UICAbEMdr5p3ks8bfR33Z6t+a97u1
|
||||
IxI5x5Lb0fdfvL8JK3nRWn7Uzbmm5Ni/OaODNKP+fIm9m2yDAs8LM8RGpPtk6i0d
|
||||
qIRta3H9KNw2Mhpkm77TtUSV/W5aOmY=
|
||||
-----END PRIVATE KEY-----
|
162
server/serve.js
162
server/serve.js
|
@ -1,162 +0,0 @@
|
|||
/*
|
||||
micro http2 server with file monitoring and automatic app rebuild
|
||||
- can process concurrent http requests
|
||||
- monitors specified filed and folders for changes
|
||||
- triggers library and application rebuild
|
||||
- any build errors are immediately displayed and can be corrected without need for restart
|
||||
- passthrough data compression
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const zlib = require('zlib');
|
||||
const http = require('http');
|
||||
const http2 = require('http2');
|
||||
const path = require('path');
|
||||
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
|
||||
const chokidar = require('chokidar');
|
||||
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
|
||||
const log = require('@vladmandic/pilogger');
|
||||
const build = require('./build.js');
|
||||
|
||||
// app configuration
|
||||
// you can provide your server key and certificate or use provided self-signed ones
|
||||
// self-signed certificate generated using:
|
||||
// openssl req -x509 -newkey rsa:4096 -nodes -keyout https.key -out https.crt -days 365 -subj "/C=US/ST=Florida/L=Miami/O=@vladmandic"
|
||||
// client app does not work without secure server since browsers enforce https for webcam access
|
||||
const options = {
|
||||
key: fs.readFileSync('server/https.key'),
|
||||
cert: fs.readFileSync('server/https.crt'),
|
||||
root: '..',
|
||||
default: 'demo/index.html',
|
||||
httpPort: 8000,
|
||||
httpsPort: 8001,
|
||||
insecureHTTPParser: false,
|
||||
minElapsed: 2,
|
||||
monitor: ['package.json', 'demo', 'src'],
|
||||
};
|
||||
|
||||
// just some predefined mime types
|
||||
const mime = {
|
||||
'.html': 'text/html; charset=utf-8',
|
||||
'.js': 'text/javascript; charset=utf-8',
|
||||
'.css': 'text/css; charset=utf-8',
|
||||
'.json': 'application/json; charset=utf-8',
|
||||
'.png': 'image/png',
|
||||
'.jpg': 'image/jpeg',
|
||||
'.gif': 'image/gif',
|
||||
'.ico': 'image/x-icon',
|
||||
'.svg': 'image/svg+xml',
|
||||
'.wav': 'audio/wav',
|
||||
'.mp4': 'video/mp4',
|
||||
'.woff': 'font/woff',
|
||||
'.woff2': 'font/woff2',
|
||||
'.ttf': 'font/ttf',
|
||||
'.wasm': 'application/wasm',
|
||||
};
|
||||
|
||||
// checks for multiple events triggering within minElapsed and merge get into single event
|
||||
let last = Date.now();
|
||||
async function buildAll(evt, msg) {
|
||||
const now = Date.now();
|
||||
if ((now - last) > options.minElapsed) build.build(evt, msg, true);
|
||||
else log.state('Build: merge event file', msg, evt);
|
||||
last = now;
|
||||
}
|
||||
|
||||
// watch filesystem for any changes and notify build when needed
|
||||
async function watch() {
|
||||
const watcher = chokidar.watch(options.monitor, {
|
||||
persistent: true,
|
||||
ignorePermissionErrors: false,
|
||||
alwaysStat: false,
|
||||
ignoreInitial: true,
|
||||
followSymlinks: true,
|
||||
usePolling: false,
|
||||
useFsEvents: false,
|
||||
atomic: true,
|
||||
});
|
||||
// single event handler for file add/change/delete
|
||||
watcher
|
||||
.on('add', (evt) => buildAll(evt, 'add'))
|
||||
.on('change', (evt) => buildAll(evt, 'modify'))
|
||||
.on('unlink', (evt) => buildAll(evt, 'remove'))
|
||||
.on('error', (err) => log.error(`Client watcher error: ${err}`))
|
||||
.on('ready', () => log.state('Monitoring:', options.monitor));
|
||||
}
|
||||
|
||||
// get file content for a valid url request
|
||||
function handle(url) {
|
||||
return new Promise((resolve) => {
|
||||
let obj = { ok: false };
|
||||
obj.file = url;
|
||||
if (!fs.existsSync(obj.file)) resolve(null);
|
||||
obj.stat = fs.statSync(obj.file);
|
||||
if (obj.stat.isFile()) obj.ok = true;
|
||||
if (!obj.ok && obj.stat.isDirectory()) {
|
||||
obj.file = path.join(obj.file, options.default);
|
||||
// @ts-ignore
|
||||
obj = handle(obj.file);
|
||||
}
|
||||
resolve(obj);
|
||||
});
|
||||
}
|
||||
|
||||
// process http requests
|
||||
async function httpRequest(req, res) {
|
||||
handle(path.join(__dirname, options.root, decodeURI(req.url)))
|
||||
.then((result) => {
|
||||
// get original ip of requestor, regardless if it's behind proxy or not
|
||||
// eslint-disable-next-line dot-notation
|
||||
const forwarded = (req.headers['forwarded'] || '').match(/for="\[(.*)\]:/);
|
||||
const ip = (Array.isArray(forwarded) ? forwarded[1] : null) || req.headers['x-forwarded-for'] || req.ip || req.socket.remoteAddress;
|
||||
if (!result || !result.ok) {
|
||||
res.writeHead(404, { 'Content-Type': 'text/html' });
|
||||
res.end('Error 404: Not Found\n', 'utf-8');
|
||||
log.warn(`${req.method}/${req.httpVersion}`, res.statusCode, req.url, ip);
|
||||
} else {
|
||||
const ext = String(path.extname(result.file)).toLowerCase();
|
||||
const contentType = mime[ext] || 'application/octet-stream';
|
||||
const accept = req.headers['accept-encoding'] ? req.headers['accept-encoding'].includes('br') : false; // does target accept brotli compressed data
|
||||
res.writeHead(200, {
|
||||
// 'Content-Length': result.stat.size, // not using as it's misleading for compressed streams
|
||||
'Content-Language': 'en', 'Content-Type': contentType, 'Content-Encoding': accept ? 'br' : '', 'Last-Modified': result.stat.mtime, 'Cache-Control': 'no-cache', 'X-Content-Type-Options': 'nosniff',
|
||||
});
|
||||
const compress = zlib.createBrotliCompress({ params: { [zlib.constants.BROTLI_PARAM_QUALITY]: 5 } }); // instance of brotli compression with level 5
|
||||
const stream = fs.createReadStream(result.file);
|
||||
if (!accept) stream.pipe(res); // don't compress data
|
||||
else stream.pipe(compress).pipe(res); // compress data
|
||||
|
||||
// alternative methods of sending data
|
||||
/// 2. read stream and send by chunk
|
||||
// const stream = fs.createReadStream(result.file);
|
||||
// stream.on('data', (chunk) => res.write(chunk));
|
||||
// stream.on('end', () => res.end());
|
||||
|
||||
// 3. read entire file and send it as blob
|
||||
// const data = fs.readFileSync(result.file);
|
||||
// res.write(data);
|
||||
log.data(`${req.method}/${req.httpVersion}`, res.statusCode, contentType, result.stat.size, req.url, ip);
|
||||
}
|
||||
return null;
|
||||
})
|
||||
.catch((err) => log.error('handle error:', err));
|
||||
}
|
||||
|
||||
// app main entry point
|
||||
async function main() {
|
||||
log.header();
|
||||
await watch();
|
||||
if (options.httpPort && options.httpPort > 0) {
|
||||
const server1 = http.createServer(options, httpRequest);
|
||||
server1.on('listening', () => log.state('HTTP server listening:', options.httpPort));
|
||||
server1.listen(options.httpPort);
|
||||
}
|
||||
if (options.httpsPort && options.httpsPort > 0) {
|
||||
const server2 = http2.createSecureServer(options, httpRequest);
|
||||
server2.on('listening', () => log.state('HTTP2 server listening:', options.httpsPort));
|
||||
server2.listen(options.httpsPort);
|
||||
}
|
||||
await build.build('all', 'startup', true);
|
||||
}
|
||||
|
||||
main();
|
|
@ -10,9 +10,9 @@ export abstract class NeuralNetwork<TNetParams> {
|
|||
this._name = name;
|
||||
}
|
||||
|
||||
protected _params: TNetParams | undefined = undefined
|
||||
protected _params: TNetParams | undefined = undefined;
|
||||
|
||||
protected _paramMappings: ParamMapping[] = []
|
||||
protected _paramMappings: ParamMapping[] = [];
|
||||
|
||||
public _name: any;
|
||||
|
||||
|
@ -62,7 +62,7 @@ export abstract class NeuralNetwork<TNetParams> {
|
|||
});
|
||||
}
|
||||
|
||||
public dispose(throwOnRedispose: boolean = true) {
|
||||
public dispose(throwOnRedispose = true) {
|
||||
this.getParamList().forEach((param) => {
|
||||
if (throwOnRedispose && param.tensor.isDisposed) {
|
||||
throw new Error(`param tensor has already been disposed for path ${param.path}`);
|
||||
|
@ -102,8 +102,9 @@ export abstract class NeuralNetwork<TNetParams> {
|
|||
}
|
||||
const { readFile } = env.getEnv();
|
||||
const { manifestUri, modelBaseUri } = getModelUris(filePath, this.getDefaultModelName());
|
||||
const fetchWeightsFromDisk = (filePaths: string[]) => Promise.all(filePaths.map((fp) => readFile(fp).then((buf) => buf.buffer)));
|
||||
const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk);
|
||||
const fetchWeightsFromDisk = (filePaths: string[]) => Promise.all(filePaths.map((fp) => readFile(fp).then((buf) => (typeof buf === 'string' ? Buffer.from(buf) : buf.buffer))));
|
||||
// @ts-ignore async-vs-sync mismatch
|
||||
const loadWeights = tf['io'].weightsLoaderFactory(fetchWeightsFromDisk);
|
||||
const manifest = JSON.parse((await readFile(manifestUri)).toString());
|
||||
const weightMap = await loadWeights(manifest, modelBaseUri);
|
||||
this.loadFromWeightMap(weightMap);
|
||||
|
|
|
@ -1,6 +1,10 @@
|
|||
export class PlatformBrowser {
|
||||
private textEncoder: TextEncoder;
|
||||
|
||||
constructor() {
|
||||
this.textEncoder = new TextEncoder();
|
||||
}
|
||||
|
||||
fetch(path: string, init?: any): Promise<Response> {
|
||||
return fetch(path, init);
|
||||
}
|
||||
|
@ -13,9 +17,6 @@ export class PlatformBrowser {
|
|||
if (encoding !== 'utf-8' && encoding !== 'utf8') {
|
||||
throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`);
|
||||
}
|
||||
if (this.textEncoder == null) {
|
||||
this.textEncoder = new TextEncoder();
|
||||
}
|
||||
return this.textEncoder.encode(text);
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
import * as tf from '../../dist/tfjs.esm';
|
||||
|
||||
import * as tf from '../../dist/tfjs.esm.js';
|
||||
import { fullyConnectedLayer } from '../common/fullyConnectedLayer';
|
||||
import { seperateWeightMaps } from '../faceProcessor/util';
|
||||
import { TinyXception } from '../xception/TinyXception';
|
||||
|
@ -10,7 +9,7 @@ import { NeuralNetwork } from '../NeuralNetwork';
|
|||
import { NetInput, TNetInput, toNetInput } from '../dom/index';
|
||||
|
||||
export class AgeGenderNet extends NeuralNetwork<NetParams> {
|
||||
private _faceFeatureExtractor: TinyXception
|
||||
private _faceFeatureExtractor: TinyXception;
|
||||
|
||||
constructor(faceFeatureExtractor: TinyXception = new TinyXception(2)) {
|
||||
super('AgeGenderNet');
|
||||
|
@ -85,7 +84,7 @@ export class AgeGenderNet extends NeuralNetwork<NetParams> {
|
|||
return 'age_gender_model';
|
||||
}
|
||||
|
||||
public dispose(throwOnRedispose: boolean = true) {
|
||||
public override dispose(throwOnRedispose = true) {
|
||||
this.faceFeatureExtractor.dispose(throwOnRedispose);
|
||||
super.dispose(throwOnRedispose);
|
||||
}
|
||||
|
|
|
@ -7,10 +7,8 @@ export interface IBoundingBox {
|
|||
bottom: number
|
||||
}
|
||||
|
||||
export class BoundingBox extends Box<BoundingBox> implements IBoundingBox {
|
||||
constructor(left: number, top: number, right: number, bottom: number, allowNegativeDimensions: boolean = false) {
|
||||
super({
|
||||
left, top, right, bottom,
|
||||
}, allowNegativeDimensions);
|
||||
export class BoundingBox extends Box implements IBoundingBox {
|
||||
constructor(left: number, top: number, right: number, bottom: number, allowNegativeDimensions = false) {
|
||||
super({ left, top, right, bottom }, allowNegativeDimensions);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,7 +9,7 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
|
|||
return !!rect && [rect.x, rect.y, rect.width, rect.height].every(isValidNumber);
|
||||
}
|
||||
|
||||
public static assertIsValidBox(box: any, callee: string, allowNegativeDimensions: boolean = false) {
|
||||
public static assertIsValidBox(box: any, callee: string, allowNegativeDimensions = false) {
|
||||
if (!Box.isRect(box)) {
|
||||
throw new Error(`${callee} - invalid box: ${JSON.stringify(box)}, expected object with properties x, y, width, height`);
|
||||
}
|
||||
|
@ -19,15 +19,15 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
|
|||
}
|
||||
}
|
||||
|
||||
private _x: number
|
||||
private _x: number;
|
||||
|
||||
private _y: number
|
||||
private _y: number;
|
||||
|
||||
private _width: number
|
||||
private _width: number;
|
||||
|
||||
private _height: number
|
||||
private _height: number;
|
||||
|
||||
constructor(_box: IBoundingBox | IRect, allowNegativeDimensions: boolean = true) {
|
||||
constructor(_box: IBoundingBox | IRect, allowNegativeDimensions = true) {
|
||||
const box = (_box || {}) as any;
|
||||
|
||||
const isBbox = [box.left, box.top, box.right, box.bottom].every(isValidNumber);
|
||||
|
@ -128,9 +128,7 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
|
|||
this.width + padX,
|
||||
this.height + padY,
|
||||
];
|
||||
return new Box({
|
||||
x, y, width, height,
|
||||
});
|
||||
return new Box({ x, y, width, height });
|
||||
}
|
||||
|
||||
public clipAtImageBorders(imgWidth: number, imgHeight: number): Box<BoxType> {
|
||||
|
@ -143,9 +141,7 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
|
|||
const clippedWidth = Math.min(newWidth, imgWidth - clippedX);
|
||||
const clippedHeight = Math.min(newHeight, imgHeight - clippedY);
|
||||
|
||||
return (new Box({
|
||||
x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight,
|
||||
})).floor();
|
||||
return (new Box({ x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight })).floor();
|
||||
}
|
||||
|
||||
public shift(sx: number, sy: number): Box<BoxType> {
|
||||
|
@ -153,9 +149,7 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
|
|||
const x = this.x + sx;
|
||||
const y = this.y + sy;
|
||||
|
||||
return new Box({
|
||||
x, y, width, height,
|
||||
});
|
||||
return new Box({ x, y, width, height });
|
||||
}
|
||||
|
||||
public padAtBorders(imageHeight: number, imageWidth: number) {
|
||||
|
@ -189,9 +183,7 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
|
|||
y = 1;
|
||||
}
|
||||
|
||||
return {
|
||||
dy, edy, dx, edx, y, ey, x, ex, w, h,
|
||||
};
|
||||
return { dy, edy, dx, edx, y, ey, x, ex, w, h };
|
||||
}
|
||||
|
||||
public calibrate(region: Box) {
|
||||
|
|
|
@ -6,9 +6,9 @@ export interface IDimensions {
|
|||
}
|
||||
|
||||
export class Dimensions implements IDimensions {
|
||||
private _width: number
|
||||
private _width: number;
|
||||
|
||||
private _height: number
|
||||
private _height: number;
|
||||
|
||||
constructor(width: number, height: number) {
|
||||
if (!isValidNumber(width) || !isValidNumber(height)) {
|
||||
|
|
|
@ -17,7 +17,7 @@ export class FaceDetection extends ObjectDetection implements IFaceDetecion {
|
|||
super(score, score, '', relativeBox, imageDims);
|
||||
}
|
||||
|
||||
public forSize(width: number, height: number): FaceDetection {
|
||||
public override forSize(width: number, height: number): FaceDetection {
|
||||
const { score, relativeBox, imageDims } = super.forSize(width, height);
|
||||
return new FaceDetection(score, relativeBox, imageDims);
|
||||
}
|
||||
|
|
|
@ -18,11 +18,11 @@ export interface IFaceLandmarks {
|
|||
}
|
||||
|
||||
export class FaceLandmarks implements IFaceLandmarks {
|
||||
protected _shift: Point
|
||||
protected _shift: Point;
|
||||
|
||||
protected _positions: Point[]
|
||||
protected _positions: Point[];
|
||||
|
||||
protected _imgDims: Dimensions
|
||||
protected _imgDims: Dimensions;
|
||||
|
||||
constructor(
|
||||
relativeFaceLandmarkPositions: Point[],
|
||||
|
|
|
@ -3,7 +3,7 @@ import { FaceLandmarks } from './FaceLandmarks';
|
|||
import { Point } from './Point';
|
||||
|
||||
export class FaceLandmarks5 extends FaceLandmarks {
|
||||
protected getRefPointsForAlignment(): Point[] {
|
||||
protected override getRefPointsForAlignment(): Point[] {
|
||||
const pts = this.positions;
|
||||
return [
|
||||
pts[0],
|
||||
|
|
|
@ -31,7 +31,7 @@ export class FaceLandmarks68 extends FaceLandmarks {
|
|||
return this.positions.slice(48, 68);
|
||||
}
|
||||
|
||||
protected getRefPointsForAlignment(): Point[] {
|
||||
protected override getRefPointsForAlignment(): Point[] {
|
||||
return [
|
||||
this.getLeftEye(),
|
||||
this.getRightEye(),
|
||||
|
|
|
@ -6,9 +6,8 @@ export interface IFaceMatch {
|
|||
}
|
||||
|
||||
export class FaceMatch implements IFaceMatch {
|
||||
private _label: string
|
||||
|
||||
private _distance: number
|
||||
private _label: string;
|
||||
private _distance: number;
|
||||
|
||||
constructor(label: string, distance: number) {
|
||||
this._label = label;
|
||||
|
@ -19,7 +18,7 @@ export class FaceMatch implements IFaceMatch {
|
|||
|
||||
public get distance(): number { return this._distance; }
|
||||
|
||||
public toString(withDistance: boolean = true): string {
|
||||
public toString(withDistance = true): string {
|
||||
return `${this.label}${withDistance ? ` (${round(this.distance)})` : ''}`;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,16 +3,15 @@ import { IBoundingBox } from './BoundingBox';
|
|||
import { Box } from './Box';
|
||||
import { IRect } from './Rect';
|
||||
|
||||
export class LabeledBox extends Box<LabeledBox> {
|
||||
export class LabeledBox extends Box {
|
||||
public static assertIsValidLabeledBox(box: any, callee: string) {
|
||||
Box.assertIsValidBox(box, callee);
|
||||
|
||||
if (!isValidNumber(box.label)) {
|
||||
throw new Error(`${callee} - expected property label (${box.label}) to be a number`);
|
||||
}
|
||||
}
|
||||
|
||||
private _label: number
|
||||
private _label: number;
|
||||
|
||||
constructor(box: IBoundingBox | IRect | any, label: number) {
|
||||
super(box);
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
export class LabeledFaceDescriptors {
|
||||
private _label: string
|
||||
private _label: string;
|
||||
|
||||
private _descriptors: Float32Array[]
|
||||
private _descriptors: Float32Array[];
|
||||
|
||||
constructor(label: string, descriptors: Float32Array[]) {
|
||||
if (!(typeof label === 'string')) {
|
||||
|
|
|
@ -3,15 +3,15 @@ import { Dimensions, IDimensions } from './Dimensions';
|
|||
import { IRect, Rect } from './Rect';
|
||||
|
||||
export class ObjectDetection {
|
||||
private _score: number
|
||||
private _score: number;
|
||||
|
||||
private _classScore: number
|
||||
private _classScore: number;
|
||||
|
||||
private _className: string
|
||||
private _className: string;
|
||||
|
||||
private _box: Rect
|
||||
private _box: Rect;
|
||||
|
||||
private _imageDims: Dimensions
|
||||
private _imageDims: Dimensions;
|
||||
|
||||
constructor(
|
||||
score: number,
|
||||
|
|
|
@ -4,9 +4,9 @@ export interface IPoint {
|
|||
}
|
||||
|
||||
export class Point implements IPoint {
|
||||
private _x: number
|
||||
private _x: number;
|
||||
|
||||
private _y: number
|
||||
private _y: number;
|
||||
|
||||
constructor(x: number, y: number) {
|
||||
this._x = x;
|
||||
|
|
|
@ -15,9 +15,9 @@ export class PredictedBox extends LabeledBox {
|
|||
}
|
||||
}
|
||||
|
||||
private _score: number
|
||||
private _score: number;
|
||||
|
||||
private _classScore: number
|
||||
private _classScore: number;
|
||||
|
||||
constructor(box: IBoundingBox | IRect | any, label: number, score: number, classScore: number) {
|
||||
super(box, label);
|
||||
|
|
|
@ -7,10 +7,8 @@ export interface IRect {
|
|||
height: number
|
||||
}
|
||||
|
||||
export class Rect extends Box<Rect> implements IRect {
|
||||
constructor(x: number, y: number, width: number, height: number, allowNegativeDimensions: boolean = false) {
|
||||
super({
|
||||
x, y, width, height,
|
||||
}, allowNegativeDimensions);
|
||||
export class Rect extends Box implements IRect {
|
||||
constructor(x: number, y: number, width: number, height: number, allowNegativeDimensions = false) {
|
||||
super({ x, y, width, height }, allowNegativeDimensions);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,7 +6,7 @@ export function convLayer(
|
|||
x: tf.Tensor4D,
|
||||
params: ConvParams,
|
||||
padding: 'valid' | 'same' = 'same',
|
||||
withRelu: boolean = false,
|
||||
withRelu = false,
|
||||
): tf.Tensor4D {
|
||||
return tf.tidy(() => {
|
||||
const out = tf.add(
|
||||
|
|
|
@ -9,19 +9,19 @@ import { imageToSquare } from './imageToSquare';
|
|||
import { TResolvedNetInput } from './types';
|
||||
|
||||
export class NetInput {
|
||||
private _imageTensors: Array<tf.Tensor3D | tf.Tensor4D> = []
|
||||
private _imageTensors: Array<tf.Tensor3D | tf.Tensor4D> = [];
|
||||
|
||||
private _canvases: HTMLCanvasElement[] = []
|
||||
private _canvases: HTMLCanvasElement[] = [];
|
||||
|
||||
private _batchSize: number
|
||||
private _batchSize: number;
|
||||
|
||||
private _treatAsBatchInput: boolean = false
|
||||
private _treatAsBatchInput = false;
|
||||
|
||||
private _inputDimensions: number[][] = []
|
||||
private _inputDimensions: number[][] = [];
|
||||
|
||||
private _inputSize: number
|
||||
private _inputSize = 0;
|
||||
|
||||
constructor(inputs: Array<TResolvedNetInput>, treatAsBatchInput: boolean = false) {
|
||||
constructor(inputs: Array<TResolvedNetInput>, treatAsBatchInput = false) {
|
||||
if (!Array.isArray(inputs)) {
|
||||
throw new Error(`NetInput.constructor - expected inputs to be an Array of TResolvedNetInput or to be instanceof tf.Tensor4D, instead have ${inputs}`);
|
||||
}
|
||||
|
@ -47,8 +47,9 @@ export class NetInput {
|
|||
return;
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
const canvas = (input as any) instanceof env.getEnv().Canvas ? input : createCanvasFromMedia(input);
|
||||
this._canvases[idx] = canvas;
|
||||
this._canvases[idx] = canvas as HTMLCanvasElement;
|
||||
this._inputDimensions[idx] = [canvas.height, canvas.width, 3];
|
||||
});
|
||||
}
|
||||
|
@ -118,7 +119,7 @@ export class NetInput {
|
|||
* both sides of the minor dimension oof the image.
|
||||
* @returns The batch tensor.
|
||||
*/
|
||||
public toBatchTensor(inputSize: number, isCenterInputs: boolean = true): tf.Tensor4D {
|
||||
public toBatchTensor(inputSize: number, isCenterInputs = true): tf.Tensor4D {
|
||||
this._inputSize = inputSize;
|
||||
|
||||
return tf.tidy(() => {
|
||||
|
@ -127,23 +128,24 @@ export class NetInput {
|
|||
|
||||
if (input instanceof tf.Tensor) {
|
||||
let imgTensor = isTensor4D(input) ? input : tf.expandDims(input);
|
||||
imgTensor = padToSquare(imgTensor, isCenterInputs);
|
||||
imgTensor = padToSquare(imgTensor as tf.Tensor4D, isCenterInputs);
|
||||
|
||||
if (imgTensor.shape[1] !== inputSize || imgTensor.shape[2] !== inputSize) {
|
||||
imgTensor = tf.image.resizeBilinear(imgTensor, [inputSize, inputSize], false, false);
|
||||
imgTensor = tf['image'].resizeBilinear(imgTensor as tf.Tensor4D, [inputSize, inputSize], false, false);
|
||||
}
|
||||
|
||||
return imgTensor.as3D(inputSize, inputSize, 3);
|
||||
}
|
||||
|
||||
if (input instanceof env.getEnv().Canvas) {
|
||||
return tf.browser.fromPixels(imageToSquare(input, inputSize, isCenterInputs));
|
||||
return tf['browser'].fromPixels(imageToSquare(input, inputSize, isCenterInputs));
|
||||
}
|
||||
|
||||
throw new Error(`toBatchTensor - at batchIdx ${batchIdx}, expected input to be instanceof tf.Tensor or instanceof HTMLCanvasElement, instead have ${input}`);
|
||||
});
|
||||
|
||||
const batchTensor = tf.stack(inputTensors.map((t) => tf.cast(t, 'float32'))).as4D(this.batchSize, inputSize, inputSize, 3);
|
||||
// const batchTensor = tf.stack(inputTensors.map((t) => tf.cast(t, 'float32'))) as tf.Tensor4D;
|
||||
|
||||
return batchTensor;
|
||||
});
|
||||
|
|
|
@ -4,7 +4,10 @@ import { isMediaLoaded } from './isMediaLoaded';
|
|||
export function awaitMediaLoaded(media: HTMLImageElement | HTMLVideoElement | HTMLCanvasElement) {
|
||||
// eslint-disable-next-line consistent-return
|
||||
return new Promise((resolve, reject) => {
|
||||
if (media instanceof env.getEnv().Canvas || isMediaLoaded(media)) return resolve(null);
|
||||
if (media instanceof env.getEnv().Canvas || isMediaLoaded(media)) {
|
||||
resolve(null);
|
||||
return;
|
||||
}
|
||||
|
||||
function onError(e: Event) {
|
||||
if (!e.currentTarget) return;
|
||||
|
|
|
@ -0,0 +1,15 @@
|
|||
import { env } from '../env/index';
|
||||
|
||||
export function bufferToVideo(buf: Blob): Promise<HTMLVideoElement> {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!(buf instanceof Blob)) reject(new Error('bufferToVideo - expected buf to be of type: Blob'));
|
||||
|
||||
const video = env.getEnv().createVideoElement();
|
||||
video.oncanplay = () => resolve(video);
|
||||
video.onerror = reject;
|
||||
video.playsInline = true;
|
||||
video.muted = true;
|
||||
video.src = URL.createObjectURL(buf);
|
||||
video.play();
|
||||
});
|
||||
}
|
|
@ -25,17 +25,11 @@ export async function extractFaceTensors(imageTensor: tf.Tensor3D | tf.Tensor4D,
|
|||
|
||||
return tf.tidy(() => {
|
||||
const [imgHeight, imgWidth, numChannels] = imageTensor.shape.slice(isTensor4D(imageTensor) ? 1 : 0);
|
||||
|
||||
const boxes = detections
|
||||
.map((det) => (det instanceof FaceDetection
|
||||
? det.forSize(imgWidth, imgHeight).box
|
||||
: det))
|
||||
const boxes = detections.map((det) => (det instanceof FaceDetection ? det.forSize(imgWidth, imgHeight).box : det))
|
||||
.map((box) => box.clipAtImageBorders(imgWidth, imgHeight));
|
||||
|
||||
const faceTensors = boxes.map(({
|
||||
x, y, width, height,
|
||||
}) => tf.slice3d(imageTensor.as3D(imgHeight, imgWidth, numChannels), [y, x, 0], [height, width, numChannels]));
|
||||
|
||||
const faceTensors = boxes
|
||||
.filter((box) => box.width > 0 && box.height > 0)
|
||||
.map(({ x, y, width, height }) => tf.slice3d(imageTensor.as3D(imgHeight, imgWidth, numChannels), [y, x, 0], [height, width, numChannels]));
|
||||
return faceTensors;
|
||||
});
|
||||
}
|
||||
|
|
|
@ -0,0 +1,12 @@
|
|||
import { bufferToVideo } from './bufferToVideo';
|
||||
import { fetchOrThrow } from './fetchOrThrow';
|
||||
|
||||
export async function fetchVideo(uri: string): Promise<HTMLVideoElement> {
|
||||
const res = await fetchOrThrow(uri);
|
||||
const blob = await (res).blob();
|
||||
|
||||
if (!blob.type.startsWith('video/')) {
|
||||
throw new Error(`fetchVideo - expected blob type to be of type video/*, instead have: ${blob.type}, for url: ${res.url}`);
|
||||
}
|
||||
return bufferToVideo(blob);
|
||||
}
|
|
@ -3,21 +3,10 @@ import { resolveInput } from './resolveInput';
|
|||
|
||||
export function getContext2dOrThrow(canvasArg: string | HTMLCanvasElement | CanvasRenderingContext2D): CanvasRenderingContext2D {
|
||||
const { Canvas, CanvasRenderingContext2D } = env.getEnv();
|
||||
|
||||
if (canvasArg instanceof CanvasRenderingContext2D) {
|
||||
return canvasArg;
|
||||
}
|
||||
|
||||
if (canvasArg instanceof CanvasRenderingContext2D) return canvasArg;
|
||||
const canvas = resolveInput(canvasArg);
|
||||
|
||||
if (!(canvas instanceof Canvas)) {
|
||||
throw new Error('resolveContext2d - expected canvas to be of instance of Canvas');
|
||||
}
|
||||
|
||||
const ctx = canvas.getContext('2d');
|
||||
if (!ctx) {
|
||||
throw new Error('resolveContext2d - canvas 2d context is null');
|
||||
}
|
||||
|
||||
if (!(canvas instanceof Canvas)) throw new Error('resolveContext2d - expected canvas to be of instance of Canvas');
|
||||
const ctx = canvas.getContext('2d', { willReadFrequently: true });
|
||||
if (!ctx) throw new Error('resolveContext2d - canvas 2d context is null');
|
||||
return ctx;
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@ export async function imageTensorToCanvas(
|
|||
|
||||
const [height, width, numChannels] = imgTensor.shape.slice(isTensor4D(imgTensor) ? 1 : 0);
|
||||
const imgTensor3D = tf.tidy(() => imgTensor.as3D(height, width, numChannels).toInt());
|
||||
await tf.browser.toPixels(imgTensor3D, targetCanvas);
|
||||
await tf['browser'].toPixels(imgTensor3D, targetCanvas);
|
||||
|
||||
imgTensor3D.dispose();
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@ import { createCanvas, createCanvasFromMedia } from './createCanvas';
|
|||
import { getContext2dOrThrow } from './getContext2dOrThrow';
|
||||
import { getMediaDimensions } from './getMediaDimensions';
|
||||
|
||||
export function imageToSquare(input: HTMLImageElement | HTMLCanvasElement, inputSize: number, centerImage: boolean = false) {
|
||||
export function imageToSquare(input: HTMLImageElement | HTMLCanvasElement, inputSize: number, centerImage = false) {
|
||||
const { Image, Canvas } = env.getEnv();
|
||||
|
||||
if (!(input instanceof Image || input instanceof Canvas)) {
|
||||
|
|
|
@ -7,6 +7,7 @@ export * from './fetchImage';
|
|||
export * from './fetchJson';
|
||||
export * from './fetchNetWeights';
|
||||
export * from './fetchOrThrow';
|
||||
export * from './fetchVideo';
|
||||
export * from './getContext2dOrThrow';
|
||||
export * from './getMediaDimensions';
|
||||
export * from './imageTensorToCanvas';
|
||||
|
|
|
@ -8,7 +8,8 @@ export async function loadWeightMap(
|
|||
defaultModelName: string,
|
||||
): Promise<tf.NamedTensorMap> {
|
||||
const { manifestUri, modelBaseUri } = getModelUris(uri, defaultModelName);
|
||||
// @ts-ignore
|
||||
const manifest = await fetchJson<tf.io.WeightsManifestConfig>(manifestUri);
|
||||
// if (manifest['weightsManifest']) manifest = manifest['weightsManifest'];
|
||||
return tf.io.loadWeights(manifest, modelBaseUri);
|
||||
return tf['io'].loadWeights(manifest, modelBaseUri);
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import { IDimensions } from '../classes/index';
|
||||
import { getMediaDimensions } from './getMediaDimensions';
|
||||
|
||||
export function matchDimensions(input: IDimensions, reference: IDimensions, useMediaDimensions: boolean = false) {
|
||||
export function matchDimensions(input: IDimensions, reference: IDimensions, useMediaDimensions = false) {
|
||||
const { width, height } = useMediaDimensions
|
||||
? getMediaDimensions(reference)
|
||||
: reference;
|
||||
|
|
|
@ -1,11 +1,9 @@
|
|||
import * as tf from '../../dist/tfjs.esm';
|
||||
import type { Tensor3D, Tensor4D } from '../../dist/tfjs.esm';
|
||||
|
||||
import { NetInput } from './NetInput';
|
||||
|
||||
export type TMediaElement = HTMLImageElement | HTMLVideoElement | HTMLCanvasElement
|
||||
|
||||
export type TResolvedNetInput = TMediaElement | tf.Tensor3D | tf.Tensor4D
|
||||
export type TResolvedNetInput = TMediaElement | Tensor3D | Tensor4D
|
||||
|
||||
export type TNetInputArg = string | TResolvedNetInput
|
||||
|
||||
export type TNetInput = TNetInputArg | Array<TNetInputArg> | NetInput | tf.Tensor4D
|
||||
export type TNetInput = string | TResolvedNetInput | Array<string | TResolvedNetInput> | NetInput
|
||||
|
|
|
@ -11,13 +11,13 @@ export interface IDrawBoxOptions {
|
|||
}
|
||||
|
||||
export class DrawBoxOptions {
|
||||
public boxColor: string
|
||||
public boxColor: string;
|
||||
|
||||
public lineWidth: number
|
||||
public lineWidth: number;
|
||||
|
||||
public drawLabelOptions: DrawTextFieldOptions
|
||||
public drawLabelOptions: DrawTextFieldOptions;
|
||||
|
||||
public label?: string
|
||||
public label?: string;
|
||||
|
||||
constructor(options: IDrawBoxOptions = {}) {
|
||||
const {
|
||||
|
@ -36,9 +36,9 @@ export class DrawBoxOptions {
|
|||
}
|
||||
|
||||
export class DrawBox {
|
||||
public box: Box
|
||||
public box: Box;
|
||||
|
||||
public options: DrawBoxOptions
|
||||
public options: DrawBoxOptions;
|
||||
|
||||
constructor(
|
||||
box: IBoundingBox | IRect,
|
||||
|
|
|
@ -17,17 +17,17 @@ export interface IDrawFaceLandmarksOptions {
|
|||
}
|
||||
|
||||
export class DrawFaceLandmarksOptions {
|
||||
public drawLines: boolean
|
||||
public drawLines: boolean;
|
||||
|
||||
public drawPoints: boolean
|
||||
public drawPoints: boolean;
|
||||
|
||||
public lineWidth: number
|
||||
public lineWidth: number;
|
||||
|
||||
public pointSize: number
|
||||
public pointSize: number;
|
||||
|
||||
public lineColor: string
|
||||
public lineColor: string;
|
||||
|
||||
public pointColor: string
|
||||
public pointColor: string;
|
||||
|
||||
constructor(options: IDrawFaceLandmarksOptions = {}) {
|
||||
const {
|
||||
|
@ -43,9 +43,9 @@ export class DrawFaceLandmarksOptions {
|
|||
}
|
||||
|
||||
export class DrawFaceLandmarks {
|
||||
public faceLandmarks: FaceLandmarks
|
||||
public faceLandmarks: FaceLandmarks;
|
||||
|
||||
public options: DrawFaceLandmarksOptions
|
||||
public options: DrawFaceLandmarksOptions;
|
||||
|
||||
constructor(
|
||||
faceLandmarks: FaceLandmarks,
|
||||
|
|
|
@ -25,17 +25,17 @@ export interface IDrawTextFieldOptions {
|
|||
}
|
||||
|
||||
export class DrawTextFieldOptions implements IDrawTextFieldOptions {
|
||||
public anchorPosition: AnchorPosition
|
||||
public anchorPosition: AnchorPosition;
|
||||
|
||||
public backgroundColor: string
|
||||
public backgroundColor: string;
|
||||
|
||||
public fontColor: string
|
||||
public fontColor: string;
|
||||
|
||||
public fontSize: number
|
||||
public fontSize: number;
|
||||
|
||||
public fontStyle: string
|
||||
public fontStyle: string;
|
||||
|
||||
public padding: number
|
||||
public padding: number;
|
||||
|
||||
constructor(options: IDrawTextFieldOptions = {}) {
|
||||
const {
|
||||
|
@ -51,11 +51,11 @@ export class DrawTextFieldOptions implements IDrawTextFieldOptions {
|
|||
}
|
||||
|
||||
export class DrawTextField {
|
||||
public text: string[]
|
||||
public text: string[];
|
||||
|
||||
public anchor : IPoint
|
||||
public anchor : IPoint;
|
||||
|
||||
public options: DrawTextFieldOptions
|
||||
public options: DrawTextFieldOptions;
|
||||
|
||||
constructor(
|
||||
text: string | string[] | DrawTextField,
|
||||
|
|
|
@ -3,7 +3,7 @@ import { Point } from '../classes/index';
|
|||
export function drawContour(
|
||||
ctx: CanvasRenderingContext2D,
|
||||
points: Point[],
|
||||
isClosed: boolean = false,
|
||||
isClosed = false,
|
||||
) {
|
||||
ctx.beginPath();
|
||||
|
||||
|
|
|
@ -7,12 +7,7 @@ import { DrawTextField } from './DrawTextField';
|
|||
|
||||
export type DrawFaceExpressionsInput = FaceExpressions | WithFaceExpressions<{}>
|
||||
|
||||
export function drawFaceExpressions(
|
||||
canvasArg: string | HTMLCanvasElement,
|
||||
faceExpressions: DrawFaceExpressionsInput | Array<DrawFaceExpressionsInput>,
|
||||
minConfidence = 0.1,
|
||||
textFieldAnchor?: IPoint,
|
||||
) {
|
||||
export function drawFaceExpressions(canvasArg: string | HTMLCanvasElement, faceExpressions: DrawFaceExpressionsInput | Array<DrawFaceExpressionsInput>, minConfidence = 0.1, textFieldAnchor?: IPoint) {
|
||||
const faceExpressionsArray = Array.isArray(faceExpressions) ? faceExpressions : [faceExpressions];
|
||||
|
||||
faceExpressionsArray.forEach((e) => {
|
||||
|
|
|
@ -16,6 +16,7 @@ export function createBrowserEnv(): Environment {
|
|||
Video: HTMLVideoElement,
|
||||
createCanvasElement: () => document.createElement('canvas'),
|
||||
createImageElement: () => document.createElement('img'),
|
||||
createVideoElement: () => document.createElement('video'),
|
||||
fetch,
|
||||
readFile,
|
||||
};
|
||||
|
|
|
@ -1,26 +1,20 @@
|
|||
import { FileSystem } from './types';
|
||||
import { isNodejs } from './isNodejs';
|
||||
|
||||
export function createFileSystem(fs?: any): FileSystem {
|
||||
let requireFsError = '';
|
||||
|
||||
if (!fs) {
|
||||
if (!fs && isNodejs()) {
|
||||
try {
|
||||
// eslint-disable-next-line global-require
|
||||
// eslint-disable-next-line global-require, @typescript-eslint/no-require-imports
|
||||
fs = require('fs');
|
||||
} catch (err) {
|
||||
requireFsError = err.toString();
|
||||
requireFsError = (err as any).toString();
|
||||
}
|
||||
}
|
||||
|
||||
const readFile = fs
|
||||
? (filePath: string) => new Promise<Buffer>((resolve, reject) => {
|
||||
fs.readFile(filePath, (err: any, buffer: Buffer) => (err ? reject(err) : resolve(buffer)));
|
||||
})
|
||||
: () => {
|
||||
throw new Error(`readFile - failed to require fs in nodejs environment with error: ${requireFsError}`);
|
||||
};
|
||||
|
||||
return {
|
||||
readFile,
|
||||
};
|
||||
// eslint-disable-next-line no-undef
|
||||
? (filePath: string) => new Promise<string | Buffer>((resolve, reject) => { fs.readFile(filePath, (err: NodeJS.ErrnoException | null, buffer: string | Buffer) => (err ? reject(err) : resolve(buffer))); })
|
||||
: () => { throw new Error(`readFile - failed to require fs in nodejs environment with error: ${requireFsError}`); };
|
||||
return { readFile };
|
||||
}
|
||||
|
|
|
@ -3,9 +3,9 @@ import { createFileSystem } from './createFileSystem';
|
|||
import { Environment } from './types';
|
||||
|
||||
export function createNodejsEnv(): Environment {
|
||||
// eslint-disable-next-line dot-notation
|
||||
const Canvas = global['Canvas'] || global.HTMLCanvasElement;
|
||||
const Canvas: (new () => HTMLCanvasElement) = (global as any)['Canvas'] || global.HTMLCanvasElement;
|
||||
const Image = global.Image || global.HTMLImageElement;
|
||||
const Video: (new () => HTMLVideoElement) = (global as any)['Video'] || global.HTMLVideoElement;
|
||||
|
||||
const createCanvasElement = () => {
|
||||
if (Canvas) return new Canvas();
|
||||
|
@ -17,6 +17,11 @@ export function createNodejsEnv(): Environment {
|
|||
throw new Error('createImageElement - missing Image implementation for nodejs environment');
|
||||
};
|
||||
|
||||
const createVideoElement = () => {
|
||||
if (Video) return new Video();
|
||||
throw new Error('createVideoElement - missing Video implementation for nodejs environment');
|
||||
};
|
||||
|
||||
const fetch = global.fetch;
|
||||
// if (!fetch) throw new Error('fetch - missing fetch implementation for nodejs environment');
|
||||
|
||||
|
@ -30,6 +35,7 @@ export function createNodejsEnv(): Environment {
|
|||
Video: global.HTMLVideoElement || class {},
|
||||
createCanvasElement,
|
||||
createImageElement,
|
||||
createVideoElement,
|
||||
fetch,
|
||||
...fileSystem,
|
||||
};
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
export function isNodejs(): boolean {
|
||||
return typeof global === 'object'
|
||||
&& typeof require === 'function'
|
||||
&& typeof module !== 'undefined'
|
||||
&& typeof process !== 'undefined' && !!process.version;
|
||||
&& typeof process !== 'undefined'
|
||||
&& process.versions != null
|
||||
&& process.versions.node != null;
|
||||
}
|
||||
|
|
|
@ -1,16 +1,17 @@
|
|||
export type FileSystem = {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
readFile: (filePath: string) => Promise<Buffer>
|
||||
}
|
||||
readFile: (filePath: string) => Promise<string | Buffer>;
|
||||
};
|
||||
|
||||
export type Environment = FileSystem & {
|
||||
Canvas: typeof HTMLCanvasElement
|
||||
CanvasRenderingContext2D: typeof CanvasRenderingContext2D
|
||||
Image: typeof HTMLImageElement
|
||||
ImageData: typeof ImageData
|
||||
Video: typeof HTMLVideoElement
|
||||
createCanvasElement: () => HTMLCanvasElement
|
||||
createImageElement: () => HTMLImageElement
|
||||
Canvas: typeof HTMLCanvasElement;
|
||||
CanvasRenderingContext2D: typeof CanvasRenderingContext2D;
|
||||
Image: typeof HTMLImageElement;
|
||||
ImageData: typeof ImageData;
|
||||
Video: typeof HTMLVideoElement;
|
||||
createCanvasElement: () => HTMLCanvasElement;
|
||||
createImageElement: () => HTMLImageElement;
|
||||
createVideoElement: () => HTMLVideoElement;
|
||||
// eslint-disable-next-line no-undef, no-unused-vars
|
||||
fetch: (url: string, init?: RequestInit) => Promise<Response>
|
||||
}
|
||||
fetch: (url: string, init?: RequestInit) => Promise<Response>;
|
||||
};
|
||||
|
|
|
@ -1,12 +1,10 @@
|
|||
export function euclideanDistance(arr1: number[] | Float32Array, arr2: number[] | Float32Array) {
|
||||
if (arr1.length !== arr2.length) throw new Error('euclideanDistance: arr1.length !== arr2.length');
|
||||
|
||||
const desc1 = Array.from(arr1);
|
||||
const desc2 = Array.from(arr2);
|
||||
|
||||
return Math.sqrt(
|
||||
desc1
|
||||
.map((val, i) => val - desc2[i])
|
||||
.reduce((res, diff) => res + (diff ** 2), 0),
|
||||
.reduce((res, diff) => res + (diff * diff), 0),
|
||||
);
|
||||
}
|
||||
|
|
|
@ -1,19 +1,13 @@
|
|||
export const FACE_EXPRESSION_LABELS = ['neutral', 'happy', 'sad', 'angry', 'fearful', 'disgusted', 'surprised'];
|
||||
export const FACE_EXPRESSION_LABELS = ['neutral', 'happy', 'sad', 'angry', 'fearful', 'disgusted', 'surprised'] as const;
|
||||
|
||||
export class FaceExpressions {
|
||||
public neutral: number
|
||||
|
||||
public happy: number
|
||||
|
||||
public sad: number
|
||||
|
||||
public angry: number
|
||||
|
||||
public fearful: number
|
||||
|
||||
public disgusted: number
|
||||
|
||||
public surprised: number
|
||||
public neutral = 0;
|
||||
public happy = 0;
|
||||
public sad = 0;
|
||||
public angry = 0;
|
||||
public fearful = 0;
|
||||
public disgusted = 0;
|
||||
public surprised = 0;
|
||||
|
||||
constructor(probabilities: number[] | Float32Array) {
|
||||
if (probabilities.length !== 7) {
|
||||
|
|
|
@ -7,7 +7,7 @@ import { DenseBlock3Params, DenseBlock4Params } from './types';
|
|||
export function denseBlock3(
|
||||
x: tf.Tensor4D,
|
||||
denseBlockParams: DenseBlock3Params,
|
||||
isFirstLayer: boolean = false,
|
||||
isFirstLayer = false,
|
||||
): tf.Tensor4D {
|
||||
return tf.tidy(() => {
|
||||
const out1 = tf.relu(
|
||||
|
@ -30,8 +30,8 @@ export function denseBlock3(
|
|||
export function denseBlock4(
|
||||
x: tf.Tensor4D,
|
||||
denseBlockParams: DenseBlock4Params,
|
||||
isFirstLayer: boolean = false,
|
||||
isScaleDown: boolean = true,
|
||||
isFirstLayer = false,
|
||||
isScaleDown = true,
|
||||
): tf.Tensor4D {
|
||||
return tf.tidy(() => {
|
||||
const out1 = tf.relu(
|
||||
|
|
|
@ -5,7 +5,7 @@ export function extractorsFactory(extractWeights: ExtractWeightsFunction, paramM
|
|||
const extractConvParams = extractConvParamsFactory(extractWeights, paramMappings);
|
||||
const extractSeparableConvParams = extractSeparableConvParamsFactory(extractWeights, paramMappings);
|
||||
|
||||
function extractDenseBlock3Params(channelsIn: number, channelsOut: number, mappedPrefix: string, isFirstLayer: boolean = false): DenseBlock3Params {
|
||||
function extractDenseBlock3Params(channelsIn: number, channelsOut: number, mappedPrefix: string, isFirstLayer = false): DenseBlock3Params {
|
||||
const conv0 = isFirstLayer
|
||||
? extractConvParams(channelsIn, channelsOut, 3, `${mappedPrefix}/conv0`)
|
||||
: extractSeparableConvParams(channelsIn, channelsOut, `${mappedPrefix}/conv0`);
|
||||
|
@ -15,7 +15,7 @@ export function extractorsFactory(extractWeights: ExtractWeightsFunction, paramM
|
|||
return { conv0, conv1, conv2 };
|
||||
}
|
||||
|
||||
function extractDenseBlock4Params(channelsIn: number, channelsOut: number, mappedPrefix: string, isFirstLayer: boolean = false): DenseBlock4Params {
|
||||
function extractDenseBlock4Params(channelsIn: number, channelsOut: number, mappedPrefix: string, isFirstLayer = false): DenseBlock4Params {
|
||||
const { conv0, conv1, conv2 } = extractDenseBlock3Params(channelsIn, channelsOut, mappedPrefix, isFirstLayer);
|
||||
const conv3 = extractSeparableConvParams(channelsOut, channelsOut, `${mappedPrefix}/conv3`);
|
||||
|
||||
|
|
|
@ -8,7 +8,7 @@ export function loadParamsFactory(weightMap: any, paramMappings: ParamMapping[])
|
|||
const extractConvParams = loadConvParamsFactory(extractWeightEntry);
|
||||
const extractSeparableConvParams = loadSeparableConvParamsFactory(extractWeightEntry);
|
||||
|
||||
function extractDenseBlock3Params(prefix: string, isFirstLayer: boolean = false): DenseBlock3Params {
|
||||
function extractDenseBlock3Params(prefix: string, isFirstLayer = false): DenseBlock3Params {
|
||||
const conv0 = isFirstLayer
|
||||
? extractConvParams(`${prefix}/conv0`)
|
||||
: extractSeparableConvParams(`${prefix}/conv0`);
|
||||
|
@ -18,7 +18,7 @@ export function loadParamsFactory(weightMap: any, paramMappings: ParamMapping[])
|
|||
return { conv0, conv1, conv2 };
|
||||
}
|
||||
|
||||
function extractDenseBlock4Params(prefix: string, isFirstLayer: boolean = false): DenseBlock4Params {
|
||||
function extractDenseBlock4Params(prefix: string, isFirstLayer = false): DenseBlock4Params {
|
||||
const conv0 = isFirstLayer
|
||||
? extractConvParams(`${prefix}/conv0`)
|
||||
: extractSeparableConvParams(`${prefix}/conv0`);
|
||||
|
|
|
@ -13,7 +13,7 @@ export abstract class FaceProcessor<
|
|||
TExtractorParams extends FaceFeatureExtractorParams | TinyFaceFeatureExtractorParams
|
||||
>
|
||||
extends NeuralNetwork<NetParams> {
|
||||
protected _faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>
|
||||
protected _faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>;
|
||||
|
||||
constructor(_name: string, faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>) {
|
||||
super(_name);
|
||||
|
@ -24,7 +24,7 @@ export abstract class FaceProcessor<
|
|||
return this._faceFeatureExtractor;
|
||||
}
|
||||
|
||||
protected abstract getDefaultModelName(): string
|
||||
protected abstract override getDefaultModelName(): string
|
||||
|
||||
protected abstract getClassifierChannelsIn(): number
|
||||
|
||||
|
@ -45,7 +45,7 @@ export abstract class FaceProcessor<
|
|||
});
|
||||
}
|
||||
|
||||
public dispose(throwOnRedispose: boolean = true) {
|
||||
public override dispose(throwOnRedispose = true) {
|
||||
this.faceFeatureExtractor.dispose(throwOnRedispose);
|
||||
super.dispose(throwOnRedispose);
|
||||
}
|
||||
|
|
|
@ -51,7 +51,7 @@ export class FaceRecognitionNet extends NeuralNetwork<NetParams> {
|
|||
const globalAvg = out.mean([1, 2]) as tf.Tensor2D;
|
||||
const fullyConnected = tf.matMul(globalAvg, params.fc);
|
||||
|
||||
return fullyConnected;
|
||||
return fullyConnected as tf.Tensor2D;
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -60,6 +60,7 @@ export class FaceRecognitionNet extends NeuralNetwork<NetParams> {
|
|||
}
|
||||
|
||||
public async computeFaceDescriptor(input: TNetInput): Promise<Float32Array|Float32Array[]> {
|
||||
// @ts-ignore
|
||||
if (input?.shape?.some((dim) => dim <= 0)) return new Float32Array(128);
|
||||
const netInput = await toNetInput(input);
|
||||
const faceDescriptorTensors = tf.tidy(() => tf.unstack(this.forwardInput(netInput)));
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue