Compare commits

...

129 Commits

Author SHA1 Message Date
Vladimir Mandic 189226d63a full rebuild
Signed-off-by: Vladimir Mandic <mandic00@live.com>
2025-02-05 09:15:34 -05:00
Vladimir Mandic f587b44f66 1.7.15 2025-02-05 09:02:09 -05:00
Vladimir Mandic e3f11b8533 update build platform
Signed-off-by: Vladimir Mandic <mandic00@live.com>
2025-02-05 09:02:06 -05:00
Vladimir Mandic 171d17cadf update changelog 2024-09-10 11:31:01 -04:00
Vladimir Mandic e4cdf624c9 update build environment and full rebuild 2024-09-10 11:30:23 -04:00
Vladimir Mandic c633f9fbe4 1.7.14 2024-09-10 11:17:44 -04:00
Vladimir Mandic ffc3c40362 rebuild 2024-01-20 15:46:59 -05:00
Vladimir Mandic a8193f9077
Merge pull request #188 from rebser/master
fixing leaking EventHandlers when using HTMLCanvasElement
2024-01-20 15:45:04 -05:00
rebser 155f07dccd
fixing leaking EventHandlers when using HTMLCanvasElement 2024-01-19 08:38:59 +01:00
Vladimir Mandic 2f0469fe6e update readme 2024-01-17 17:04:22 -05:00
Vladimir Mandic 697b265337 rebuild types 2024-01-17 17:01:20 -05:00
Vladimir Mandic 4719b81587 rebuild 2024-01-17 16:56:53 -05:00
Vladimir Mandic fc9a39ea13 1.7.13 2024-01-17 16:44:28 -05:00
Vladimir Mandic 438897c5a2 update all dependencies 2024-01-17 16:44:24 -05:00
Vladimir Mandic f4d4780267
Merge pull request #186 from khwalkowicz/master
feat: enable noImplicitAny
2024-01-17 16:06:03 -05:00
Kamil H. Walkowicz a5c767fdff feat: enable noImplicitAny 2024-01-16 18:09:52 +01:00
Vladimir Mandic 1fa29b0fd3 update tfjs and rebuild 2023-06-12 12:02:21 -04:00
Vladimir Mandic 472f2e4480 1.7.12 2023-06-12 12:01:45 -04:00
Vladimir Mandic 4433ce44bc update dependencies 2023-05-08 09:08:30 -04:00
Vladimir Mandic 4ca829f941 1.7.11 2023-05-08 09:08:05 -04:00
Vladimir Mandic 038349968c update tfjs 2023-03-21 08:00:18 -04:00
Vladimir Mandic ae96c7b230 1.7.10 2023-03-21 07:59:27 -04:00
Vladimir Mandic f9f036ba01 change typedefs 2023-01-29 10:08:46 -05:00
Vladimir Mandic 0736a99250 1.7.9 2023-01-29 09:00:29 -05:00
Vladimir Mandic 3ea729badb update dependencies 2023-01-21 09:06:35 -05:00
Vladimir Mandic d36ed6d266 update changelog 2023-01-06 13:25:52 -05:00
Vladimir Mandic 4061d4d62f update tfjs 2023-01-06 13:24:17 -05:00
Vladimir Mandic b034c46f80 1.7.8 2023-01-06 13:04:31 -05:00
Vladimir Mandic aefd776a9e update dependencies 2022-12-21 14:14:22 -05:00
Vladimir Mandic 20eb54beb4 update 2022-12-04 14:14:05 -05:00
Vladimir Mandic e8301c5277 update 2022-12-04 13:23:41 -05:00
Vladimir Mandic fba823ba50 update tfjs 2022-12-01 14:56:40 -05:00
Vladimir Mandic a1cb6de1e8 1.7.7 2022-12-01 14:55:47 -05:00
Vladimir Mandic fb3836019f update dependencies 2022-11-12 11:54:00 -05:00
Vladimir Mandic 15ae496f40 update release 2022-10-18 07:23:49 -04:00
Vladimir Mandic 0009d1bc34 1.7.6 2022-10-18 07:23:04 -04:00
Vladimir Mandic adc4b3a11d update dependencies 2022-10-18 07:10:40 -04:00
Sohaib Ahmed 7e5a1289ff
Fix face angles (yaw, pitch, & roll) accuracy (#130)
Previously, the derived angles seemed inaccurate and somewhat unusable (given that their output was in radians). This update uses a person's mesh positions and chooses specific points for accurate results. It also adds directionality to the movements (e.g., pitching the head backwards is a negative result, as is rolling the head to the left).

The webcam.js file has also been updated to showcase the correct output in degrees (reducing potential user confusion).

Committer: Sohaib Ahmed <sohaibi.ahmed@icloud.com>

Co-authored-by: Sophia Glisch <sophiaglisch@Sophias-MacBook-Pro.local>
2022-10-18 07:09:35 -04:00
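
The commit above describes reporting face angles in degrees rather than radians. A minimal sketch of that unit conversion, using hypothetical helper and field names (roll/pitch/yaw follow the commit text, not the actual webcam.js code):

```js
// Hypothetical helper: convert face angle outputs from radians to signed degrees.
// Negative pitch = head pitched backwards, negative roll = head rolled to the left,
// matching the directionality described in the commit message.
const toDegrees = (radians) => Math.round((radians * 180) / Math.PI);

const formatAngles = (angle) => ({
  roll: toDegrees(angle.roll),
  pitch: toDegrees(angle.pitch),
  yaw: toDegrees(angle.yaw),
});

// Example: { roll: -0.12, pitch: 0.25, yaw: 0.05 } -> { roll: -7, pitch: 14, yaw: 3 }
```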
Vladimir Mandic cd2c553737 update tfjs 2022-10-14 08:01:39 -04:00
Vladimir Mandic a433fc0681 1.7.5 2022-10-09 13:42:45 -04:00
Vladimir Mandic f9902b0459 update readme 2022-10-09 13:42:38 -04:00
Vladimir Mandic bd5ab6bb0f update 2022-10-09 13:41:11 -04:00
Vladimir Mandic 96fed4f123 update tfjs 2022-10-09 13:40:33 -04:00
Vladimir Mandic 0cbfd9b01b update dependencies 2022-09-29 10:38:14 -04:00
Vladimir Mandic dea225bbeb
Create FUNDING.yml 2022-09-26 09:39:08 -04:00
Vladimir Mandic 602e86cbec add node-wasm demo 2022-09-25 16:40:42 -04:00
Vladimir Mandic 00bf49b24f 1.7.4 2022-09-25 16:39:22 -04:00
Vladimir Mandic fa33c1281c improve face compare performance 2022-09-14 08:18:51 -04:00
Vladimir Mandic 7f613367a3 update tfjs and typescript 2022-09-04 15:18:07 -04:00
Vladimir Mandic 4d65f459f9 update tfjs 2022-08-24 08:21:15 -04:00
Vladimir Mandic d28e5d2142 1.7.3 2022-08-24 08:20:11 -04:00
Vladimir Mandic 6aeb292453 refresh release 2022-08-23 08:26:07 -04:00
Vladimir Mandic 289faf17f2 1.7.2 2022-08-23 08:25:42 -04:00
Vladimir Mandic 7a6f7d96b7 document and remove optional dependencies 2022-08-23 08:21:20 -04:00
Vladimir Mandic 870eebedfa update dependencies 2022-08-22 13:17:39 -04:00
Vladimir Mandic 1ed702f713 update readme 2022-08-16 20:25:26 -04:00
Nina Egger b2a988e436
update readme 2022-08-03 15:14:56 -04:00
Vladimir Mandic 5c38676a83 update build platform 2022-07-29 09:24:51 -04:00
Vladimir Mandic bac0ef10cf update readme 2022-07-26 07:27:52 -04:00
Vladimir Mandic 8baef0ef68 update links 2022-07-25 08:38:52 -04:00
Vladimir Mandic c5dbb9d4e9 release build 2022-07-25 08:23:57 -04:00
Vladimir Mandic a8021dc2a3 1.7.1 2022-07-25 08:21:02 -04:00
Vladimir Mandic f946780bab refactor dependencies 2022-07-25 08:20:59 -04:00
Vladimir Mandic 8e7061a9aa full rebuild 2022-05-24 07:18:59 -04:00
Vladimir Mandic cd904ca5dd 1.6.11 2022-05-24 07:18:51 -04:00
Vladimir Mandic 496779fee2 1.6.10 2022-05-24 07:17:40 -04:00
Vladimir Mandic 4ba4a99ee1 update tfjs 2022-05-24 07:16:42 -04:00
Vladimir Mandic 31170e750b update changelog 2022-05-18 08:36:24 -04:00
Vladimir Mandic 5f58cd376d update tfjs 2022-05-18 08:36:05 -04:00
Vladimir Mandic 07eb00d7d6 1.6.9 2022-05-18 08:21:59 -04:00
Vladimir Mandic a1f7a0841f update libraries 2022-05-09 08:12:24 -04:00
Vladimir Mandic 49a594a59b 1.6.8 2022-05-09 08:11:31 -04:00
Vladimir Mandic 3b3ab219dc update dependencies 2022-04-09 09:48:06 -04:00
Vladimir Mandic 2fce7338dc exclude impossible detected face boxes 2022-04-05 07:38:11 -04:00
Vladimir Mandic 6cafeafba1 update tfjs 2022-04-01 09:16:17 -04:00
Vladimir Mandic d0f1349a23 1.6.7 2022-04-01 09:15:45 -04:00
abdemirza cdb0e485f8
fixed typo error (#97)
Co-authored-by: Abuzar Mirza <abdermiza@gmail.com>
2022-03-10 06:48:14 -05:00
Vladimir Mandic 5bcc4d2a73 update changelog 2022-03-07 13:17:54 -05:00
Vladimir Mandic 92008ed6f4 update tfjs and ts 2022-03-07 13:17:31 -05:00
Vladimir Mandic c1b38f99fe 1.6.6 2022-03-04 16:48:47 -05:00
Vladimir Mandic 0c5251c219 toolkit refresh 2022-02-07 09:43:35 -05:00
Vladimir Mandic fcf61e5c30 1.6.5 2022-02-07 09:41:55 -05:00
Vladimir Mandic 8c7e21b1c9 update tfjs and expand readme 2022-01-14 10:04:13 -05:00
Vladimir Mandic 2841969df8 1.6.4 2022-01-14 09:54:19 -05:00
Vladimir Mandic 39b137ed63 add node with wasm build target 2022-01-06 07:59:13 -05:00
Vladimir Mandic c53becfc67 1.6.3 2022-01-06 07:58:05 -05:00
Vladimir Mandic fd427cce39 update lint 2022-01-01 07:55:12 -05:00
Vladimir Mandic 43805b50c6 update demos 2022-01-01 07:52:40 -05:00
Vladimir Mandic fc18d89ab6 1.6.2 2022-01-01 07:51:51 -05:00
Vladimir Mandic 0de113080c update 2021-12-27 10:52:58 -05:00
Vladimir Mandic 471ddb7549 update 2021-12-14 15:42:06 -05:00
Vladimir Mandic 70991235df update tfjs 2021-12-09 14:22:22 -05:00
Vladimir Mandic c07be32e26 1.6.1 2021-12-09 14:20:24 -05:00
Vladimir Mandic 936ecba7ec update build 2021-12-06 21:43:06 -05:00
Vladimir Mandic 63476fcbc0 rebuild 2021-12-06 06:34:50 -05:00
Vladimir Mandic 62da12758f update 2021-12-03 11:32:42 -05:00
Vladimir Mandic bd4d5935fe update 2021-12-03 11:28:27 -05:00
Vladimir Mandic 118fbaba4d release preview 2021-12-01 17:21:12 -05:00
Vladimir Mandic e70d9bb18b switch to custom tfjs and new typedefs 2021-12-01 15:37:52 -05:00
Vladimir Mandic f1a2ef34a5 rebuild 2021-12-01 07:51:57 -05:00
Vladimir Mandic e7fd0efd27 1.5.8 2021-11-30 13:17:15 -05:00
Vladimir Mandic eb5501c672 update tfjs 2021-10-28 13:58:21 -04:00
Vladimir Mandic 8b304fa3d4 1.5.7 2021-10-28 13:56:38 -04:00
Vladimir Mandic 1824a62efb update readme 2021-10-23 09:52:51 -04:00
Vladimir Mandic bd2317d42e update tfjs to 3.10.0 2021-10-22 09:06:43 -04:00
Vladimir Mandic 1def723c7b 1.5.6 2021-10-22 09:01:27 -04:00
Vladimir Mandic d78dd3aae1 update dependencies and stricter linting rules 2021-10-19 08:04:24 -04:00
Vladimir Mandic 461e074993 1.5.5 2021-10-19 07:54:26 -04:00
Vladimir Mandic 1d30a9f816 rebuild 2021-09-30 13:45:23 -04:00
Vladimir Mandic fcbfc8589a allow backend change in demo via url params 2021-09-30 13:43:15 -04:00
Vladimir Mandic c7b2c65c97 add node-match demo 2021-09-29 13:03:02 -04:00
Vladimir Mandic 1b4580dd6e fix face matcher 2021-09-29 09:32:30 -04:00
Vladimir Mandic fdddee7101 1.5.4 2021-09-29 09:31:42 -04:00
Vladimir Mandic aee959f464 update build platform and typedoc template 2021-09-18 18:38:13 -04:00
Vladimir Mandic f70e5615b4 update release 2021-09-16 08:31:45 -04:00
Vladimir Mandic 4ba43e08ae 1.5.3 2021-09-16 08:30:53 -04:00
Vladimir Mandic c3049e7c29 simplify tfjs imports 2021-09-16 08:30:50 -04:00
Vladimir Mandic e2609a0ef2 update sourcemaps 2021-09-11 11:14:57 -04:00
Vladimir Mandic d13586f549 reduce bundle size 2021-09-11 11:11:38 -04:00
Vladimir Mandic 519e346f02 enable webgl uniforms 2021-09-10 10:24:33 -04:00
Vladimir Mandic efb307d230 1.5.2 2021-09-10 10:22:09 -04:00
Vladimir Mandic 47f2b53e92 update dependencies 2021-09-08 13:57:03 -04:00
Vladimir Mandic 9b810d8028 redesign build platform 2021-09-08 13:51:28 -04:00
Vladimir Mandic f48cbda416 1.5.1 2021-09-08 13:50:47 -04:00
Vladimir Mandic ac172b8be5 update dependencies 2021-09-05 17:06:09 -04:00
Vladimir Mandic 2c8c8c2c1c update tfjs 3.9.0 2021-08-31 12:21:57 -04:00
Vladimir Mandic 9fb3029211 1.4.2 2021-08-31 12:21:05 -04:00
Vladimir Mandic 225192d18d update dependencies 2021-08-10 08:19:49 -04:00
Vladimir Mandic 8dab959446 update 2021-07-29 09:18:21 -04:00
596 changed files with 20368 additions and 236785 deletions

148
.build.json Normal file
View File

@ -0,0 +1,148 @@
{
"log": {
"enabled": false,
"debug": false,
"console": true,
"output": "build.log"
},
"profiles": {
"production": ["compile", "typings", "typedoc", "lint", "changelog"],
"development": ["serve", "watch", "compile"]
},
"clean": {
"locations": ["dist/*", "typedoc/*", "types/lib/src"]
},
"lint": {
"locations": [ "src/" ],
"rules": { }
},
"changelog": {
"log": "CHANGELOG.md"
},
"serve": {
"sslKey": "cert/https.key",
"sslCrt": "cert/https.crt",
"httpPort": 8000,
"httpsPort": 8001,
"documentRoot": ".",
"defaultFolder": "demo",
"defaultFile": "index.html"
},
"build": {
"global": {
"target": "es2018",
"treeShaking": true,
"ignoreAnnotations": true,
"sourcemap": false,
"banner": { "js": "/*\n Face-API\n homepage: <https://github.com/vladmandic/face-api>\n author: <https://github.com/vladmandic>'\n*/\n" }
},
"targets": [
{
"name": "tfjs/browser/tf-version",
"platform": "browser",
"format": "esm",
"input": "src/tfjs/tf-version.ts",
"output": "dist/tfjs.version.js"
},
{
"name": "tfjs/node/cpu",
"platform": "node",
"format": "cjs",
"input": "src/tfjs/tf-node.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/node/cpu",
"platform": "node",
"format": "cjs",
"input": "src/index.ts",
"output": "dist/face-api.node.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/node/gpu",
"platform": "node",
"format": "cjs",
"input": "src/tfjs/tf-node-gpu.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/node/gpu",
"platform": "node",
"format": "cjs",
"input": "src/index.ts",
"output": "dist/face-api.node-gpu.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/node/wasm",
"platform": "node",
"format": "cjs",
"input": "src/tfjs/tf-node-wasm.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/node/wasm",
"platform": "node",
"format": "cjs",
"input": "src/index.ts",
"output": "dist/face-api.node-wasm.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/browser/esm/nobundle",
"platform": "browser",
"format": "esm",
"input": "src/tfjs/tf-browser.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/browser/esm/nobundle",
"platform": "browser",
"format": "esm",
"input": "src/index.ts",
"output": "dist/face-api.esm-nobundle.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/browser/esm/bundle",
"platform": "browser",
"format": "esm",
"input": "src/tfjs/tf-browser.ts",
"output": "dist/tfjs.esm.js"
},
{
"name": "faceapi/browser/iife/bundle",
"platform": "browser",
"format": "iife",
"globalName": "faceapi",
"minify": true,
"input": "src/index.ts",
"output": "dist/face-api.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/browser/esm/bundle",
"platform": "browser",
"format": "esm",
"sourcemap": true,
"input": "src/index.ts",
"output": "dist/face-api.esm.js",
"typings": "types/lib",
"typedoc": "typedoc",
"external": ["@tensorflow"]
}
]
},
"watch": {
"enabled": true,
"locations": [ "src/**" ]
},
"typescript": {
"allowJs": false
}
}
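
The `profiles` section above defines which build steps run for production and development; those profiles are executed by the `@vladmandic/build` module. A minimal sketch of invoking it programmatically, mirroring the pattern of the `build.js` file shown later in this diff (that the module picks up `.build.json` from the project root is an assumption based on the README build log):

```js
// Minimal sketch: run a build profile defined in .build.json
// using @vladmandic/build, following the same pattern as build.js below.
const Build = require('@vladmandic/build').Build;

async function run() {
  const build = new Build(); // assumed to read .build.json from the project root
  await build.run('development'); // 'development' profile: serve, watch, compile
  // await build.run('production'); // 'production' profile: compile, typings, typedoc, lint, changelog
}

run();
```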

View File

@ -7,7 +7,7 @@
"es2020": true
},
"parser": "@typescript-eslint/parser",
"parserOptions": { "ecmaVersion": 2020 },
"parserOptions": { "ecmaVersion": "latest" },
"plugins": [
"@typescript-eslint"
],
@ -17,7 +17,6 @@
"plugin:import/warnings",
"plugin:node/recommended",
"plugin:promise/recommended",
"plugin:json/recommended-with-comments",
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended",
"airbnb-base"
@ -29,8 +28,11 @@
"@typescript-eslint/ban-ts-comment": "off",
"@typescript-eslint/explicit-module-boundary-types": "off",
"@typescript-eslint/no-var-requires": "off",
"@typescript-eslint/no-empty-object-type": "off",
"@typescript-eslint/no-require-imports": "off",
"camelcase": "off",
"class-methods-use-this": "off",
"default-param-last": "off",
"dot-notation": "off",
"func-names": "off",
"guard-for-in": "off",
@ -56,6 +58,7 @@
"no-restricted-syntax": "off",
"no-return-assign": "off",
"no-underscore-dangle": "off",
"no-promise-executor-return": "off",
"node/no-missing-import": ["error", { "tryExtensions": [".js", ".json", ".ts"] }],
"node/no-unpublished-import": "off",
"node/no-unpublished-require": "off",

13
.github/FUNDING.yml vendored Normal file
View File

@ -0,0 +1,13 @@
# These are supported funding model platforms
github: [vladmandic]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

1
.gitignore vendored
View File

@ -1,3 +1,2 @@
node_modules
pnpm-lock.yaml

View File

@ -2,3 +2,4 @@ node_modules
pnpm-lock.yaml
typedoc
test
types/lib

6
.npmrc
View File

@ -1 +1,5 @@
force = true
force=true
production=true
legacy-peer-deps=true
strict-peer-dependencies=false
node-options='--no-deprecation'

3
.vscode/settings.json vendored Normal file
View File

@ -0,0 +1,3 @@
{
"typescript.tsdk": "node_modules/typescript/lib"
}

View File

@ -1,14 +1,164 @@
# @vladmandic/face-api
Version: **1.4.1**
Description: **FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS**
Version: **1.7.15**
Description: **FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS**
Author: **Vladimir Mandic <mandic00@live.com>**
License: **MIT** </LICENSE>
Repository: **<git+https://github.com/vladmandic/face-api.git>**
Author: **Vladimir Mandic <mandic00@live.com>**
License: **MIT**
Repository: **<https://github.com/vladmandic/face-api>**
## Changelog
### **1.7.15** 2025/02/05 mandic00@live.com
### **origin/master** 2024/09/10 mandic00@live.com
### **1.7.14** 2024/09/10 mandic00@live.com
- rebuild
- merge pull request #188 from rebser/master
- fixing leaking eventhandlers when using htmlcanvaselement
- rebuild types
- rebuild
### **1.7.13** 2024/01/17 mandic00@live.com
- merge pull request #186 from khwalkowicz/master
- feat: enable noimplicitany
### **release: 1.7.12** 2023/06/12 mandic00@live.com
### **1.7.12** 2023/06/12 mandic00@live.com
### **1.7.11** 2023/05/08 mandic00@live.com
### **1.7.10** 2023/03/21 mandic00@live.com
- change typedefs
### **1.7.9** 2023/01/29 mandic00@live.com
### **1.7.8** 2023/01/06 mandic00@live.com
### **1.7.7** 2022/12/01 mandic00@live.com
### **1.7.6** 2022/10/18 mandic00@live.com
- fix face angles (yaw, pitch, & roll) accuracy (#130)
### **1.7.5** 2022/10/09 mandic00@live.com
- create funding.yml
- add node-wasm demo
### **1.7.4** 2022/09/25 mandic00@live.com
- improve face compare performance
### **1.7.3** 2022/08/24 mandic00@live.com
- refresh release
### **1.7.2** 2022/08/23 mandic00@live.com
- document and remove optional dependencies
### **release: 1.7.1** 2022/07/25 mandic00@live.com
### **1.7.1** 2022/07/25 mandic00@live.com
- refactor dependencies
- full rebuild
### **1.6.11** 2022/05/24 mandic00@live.com
### **1.6.10** 2022/05/24 mandic00@live.com
### **1.6.9** 2022/05/18 mandic00@live.com
### **1.6.8** 2022/05/09 mandic00@live.com
- exclude impossible detected face boxes
### **1.6.7** 2022/04/01 mandic00@live.com
- fixed typo error (#97)
### **1.6.6** 2022/03/04 mandic00@live.com
### **1.6.5** 2022/02/07 mandic00@live.com
### **1.6.4** 2022/01/14 mandic00@live.com
- add node with wasm build target
### **1.6.3** 2022/01/06 mandic00@live.com
### **1.6.2** 2022/01/01 mandic00@live.com
### **1.6.1** 2021/12/09 mandic00@live.com
- rebuild
- release preview
- switch to custom tfjs and new typedefs
- rebuild
### **1.5.8** 2021/11/30 mandic00@live.com
### **1.5.7** 2021/10/28 mandic00@live.com
### **1.5.6** 2021/10/22 mandic00@live.com
### **release: 1.5.5** 2021/10/19 mandic00@live.com
### **1.5.5** 2021/10/19 mandic00@live.com
- allow backend change in demo via url params
- add node-match demo
- fix face matcher
### **1.5.4** 2021/09/29 mandic00@live.com
### **1.5.3** 2021/09/16 mandic00@live.com
- simplify tfjs imports
- reduce bundle size
- enable webgl uniforms
### **1.5.2** 2021/09/10 mandic00@live.com
- redesign build platform
### **1.5.1** 2021/09/08 mandic00@live.com
### **1.4.2** 2021/08/31 mandic00@live.com
### **release: 1.4.1** 2021/07/29 mandic00@live.com
### **1.4.1** 2021/07/29 mandic00@live.com
@ -107,111 +257,61 @@ Repository: **<git+https://github.com/vladmandic/face-api.git>**
- add badges
- optimize for npm
### **0.30.6** 2021/03/08 mandic00@live.com
- 0.30.6
- added typings for face angle
- disable landmark printing
### **0.30.5** 2021/03/07 mandic00@live.com
- 0.30.5
- enabled live demo on gitpages
### **0.30.4** 2021/03/07 mandic00@live.com
- 0.30.4
- added face angle calculations
- added documentation
- package update
### **0.30.3** 2021/03/04 mandic00@live.com
### **0.30.2** 2021/02/26 mandic00@live.com
### **0.30.1** 2021/02/25 mandic00@live.com
### **0.13.3** 2021/02/21 mandic00@live.com
- 0.30.3
- 0.30.2
- 0.30.1
- 0.13.3
- added note-cpu target
- merge pull request #39 from xemle/feature/node-cpu
- add node-cpu build for non supported systems of libtensorflow
### **0.13.2** 2021/02/20 mandic00@live.com
### **0.13.1** 2021/02/20 mandic00@live.com
### **0.12.10** 2021/02/20 mandic00@live.com
- 0.13.2
- 0.13.1
- 0.12.10
- exception handling
- 0.12.9
- exception handling
- 0.12.8
- exception handling
### **0.12.9** 2021/02/20 mandic00@live.com
### **0.12.8** 2021/02/20 mandic00@live.com
### **0.12.7** 2021/02/17 mandic00@live.com
- 0.12.7
### **0.12.6** 2021/02/13 mandic00@live.com
### **0.12.5** 2021/02/12 mandic00@live.com
### **0.12.4** 2021/02/06 mandic00@live.com
### **0.12.3** 2021/02/06 mandic00@live.com
### **0.12.2** 2021/02/02 mandic00@live.com
- 0.12.6
- 0.12.5
- 0.12.4
- 0.12.3
- 0.12.2
### **update for tfjs 3.0.0** 2021/01/29 mandic00@live.com
### **0.12.1** 2021/01/29 mandic00@live.com
- 0.12.1
- rebuild
### **0.11.6** 2021/01/24 mandic00@live.com
- 0.11.6
- add check for null face descriptor
- merge pull request #34 from patrickhulce/patch-1
- fix: return empty descriptor for zero-sized faces
### **0.11.5** 2021/01/22 mandic00@live.com
### **0.11.4** 2021/01/22 mandic00@live.com
### **0.11.3** 2021/01/20 mandic00@live.com
- 0.11.5
- 0.11.4
- 0.11.3
- fix typo
- enable full minification
### **0.11.2** 2021/01/12 mandic00@live.com
- 0.11.2
- full rebuild
### **0.11.1** 2021/01/10 mandic00@live.com
- 0.11.1
- added live webcam demo
### **0.10.2** 2021/01/03 mandic00@live.com
- 0.10.2
- ts linting
- version bump
### **0.10.1** 2020/12/23 mandic00@live.com
- 0.10.1
- full re-lint and typings generation
- rebuild

194
README.md
View File

@ -53,18 +53,18 @@ Example can be accessed directly using Git pages using URL:
### NodeJS
Three NodeJS examples are:
NodeJS examples are:
- `/demo/node-simple.js`:
Simplest possible NodeJS demo for FaceAPI in under 30 lines of JavaScript code
- `/demo/node.js`:
Regular usage of `FaceAPI` from `NodeJS`
Using `TFJS` native methods to load images without external dependencies
- `/demo/node-canvas.js`:
Regular usage of `FaceAPI` from `NodeJS`
- `/demo/node-canvas.js` and `/demo/node-image.js`:
Using external `canvas` module to load images
Which also allows for image drawing and saving inside `NodeJS` environment
- `/demo/node-wasm.js`:
Same as `node-canvas`, but using `WASM` backend in `NodeJS` environment
Because why not :)
- `/demo/node-match.js`:
Simple demo that compares face similarity from a given image
to a second image or list of images in a folder
- `/demo/node-multiprocess.js`:
Multiprocessing showcase that uses pool of worker processes
(`node-multiprocess-worker.js`)
@ -104,8 +104,11 @@ Three NodeJS examples are:
2021-03-14 08:42:09 STATE: Main: worker exit: 1888019 0
```
Note that `@tensorflow/tfjs-node` or `@tensorflow/tfjs-node-gpu`
must be installed before using NodeJS example
### NodeJS Notes
- Supported NodeJS versions are **14** up to **22**
NodeJS version **23** and higher are not supported due to incompatibility with TensorFlow/JS
- `@tensorflow/tfjs-node` or `@tensorflow/tfjs-node-gpu`
must be installed before using any **NodeJS** examples
<br><hr><br>
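
To complement the demo list above, a minimal sketch of the `node-simple` pattern (load `tfjs-node` before `face-api`, load models from disk, run detection). Model path, image path, and options are illustrative, based on the demo code shown later in this diff rather than copied from the actual `node-simple.js`:

```js
// Minimal NodeJS sketch following the pattern of the node demos in this diff:
// tfjs-node must be loaded before face-api; models are loaded from a local ./model folder.
const fs = require('fs');
const tf = require('@tensorflow/tfjs-node'); // must be installed separately
const faceapi = require('@vladmandic/face-api');

async function detect(imageFile) {
  await tf.ready();
  await faceapi.nets.ssdMobilenetv1.loadFromDisk('model');
  await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
  const buffer = fs.readFileSync(imageFile);
  const tensor = tf.node.decodeImage(buffer, 3); // decode using tfjs native methods, no external dependencies
  const options = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.5 });
  const results = await faceapi.detectAllFaces(tensor, options).withFaceLandmarks();
  tf.dispose(tensor);
  return results;
}

detect('demo/sample1.jpg').then((res) => console.log('detected faces:', res.length)); // eslint-disable-line no-console
```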
@ -133,8 +136,6 @@ Simply include latest version of `FaceAPI` directly from a CDN in your HTML:
*without* TFJS pre-bundled
- `dist/face-api.node-gpu.js`: CommonJS format for server-side NodeJS execution
*without* TFJS pre-bundled and optimized for CUDA GPU acceleration
- `dist/face-api.node-cpu.js`: CommonJS format for server-side NodeJS execution
*without* TFJS pre-bundled and using JS engine for platforms where tensorflow binary library version is not available
Defaults are:
@ -260,7 +261,7 @@ If you want to GPU Accelerated execution in NodeJS, you must have CUDA libraries
Then install appropriate version of `FaceAPI`:
```shell
npm install @tensorflow/tfjs-node
npm install @tensorflow/tfjs-node-gpu
npm install @vladmandic/face-api
```
@ -271,18 +272,24 @@ And then use with:
const faceapi = require('@vladmandic/face-api/dist/face-api.node-gpu.js'); // this loads face-api version with correct bindings for tfjs-node-gpu
```
If you want to use `FaceAPI` in a NodeJS on platforms where NodeJS binary libraries are not supported, you can use JavaScript CPU backend.
If you want to use `FaceAPI` in a NodeJS on platforms where **tensorflow** binary libraries are not supported, you can use NodeJS **WASM** backend.
```shell
npm install @tensorflow/tfjs
npm install @tensorflow/tfjs-backend-wasm
npm install @vladmandic/face-api
```
And then use with:
```js
const tf = require('@tensorflow/tfjs')
const faceapi = require('@vladmandic/face-api/dist/face-api.node-cpu.js');
const tf = require('@tensorflow/tfjs');
const wasm = require('@tensorflow/tfjs-backend-wasm');
const faceapi = require('@vladmandic/face-api/dist/face-api.node-wasm.js'); // use this when using face-api in dev mode
wasm.setWasmPaths('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/');
await tf.setBackend('wasm');
await tf.ready();
...
```
If you want to use graphical functions inside NodeJS,
@ -310,12 +317,14 @@ faceapi.env.monkeyPatch({ Canvas, Image, ImageData })
## Weights
Pretrained models and their weights are includes in `./model`.
Pretrained models and their weights are included in `./model`.
<br><hr><br>
## Test & Dev Web Server
To install development dependencies, use `npm install --production=false`
Built-in test&dev web server can be started using
```shell
@ -327,38 +336,47 @@ By default it starts HTTP server on port 8000 and HTTPS server on port 8001 and
- <https://localhost:8001/demo/index.html>
- <https://localhost:8001/demo/webcam.html>
```json
2021-07-29 09:03:02 INFO: @vladmandic/face-api version 1.3.1
2021-07-29 09:03:02 INFO: User: vlado Platform: linux Arch: x64 Node: v16.5.0
2021-07-29 09:03:02 INFO: Build: file startup all target: es2018
2021-07-29 09:03:02 STATE: HTTP server listening: 8000
2021-07-29 09:03:02 STATE: HTTP2 server listening: 8001
2021-07-29 09:03:02 STATE: Build for: node type: tfjs: { imports: 1, importBytes: 143, outputBytes: 1327, outputFiles: 'dist/tfjs.esm.js' }
2021-07-29 09:03:02 STATE: Build for: node type: node: { imports: 162, importBytes: 234303, outputBytes: 175090, outputFiles: 'dist/face-api.node.js' }
2021-07-29 09:03:02 STATE: Monitoring: [ 'package.json', 'demo', 'src', [length]: 3 ]
2021-07-29 09:03:02 STATE: Build for: nodeGPU type: tfjs: { imports: 1, importBytes: 147, outputBytes: 1335, outputFiles: 'dist/tfjs.esm.js' }
2021-07-29 09:03:02 STATE: Build for: nodeGPU type: node: { imports: 162, importBytes: 234311, outputBytes: 175098, outputFiles: 'dist/face-api.node-gpu.js' }
2021-07-29 09:03:02 STATE: Build for: nodeCPU type: tfjs: { imports: 1, importBytes: 138, outputBytes: 1326, outputFiles: 'dist/tfjs.esm.js' }
2021-07-29 09:03:02 STATE: Build for: nodeCPU type: node: { imports: 162, importBytes: 234302, outputBytes: 175089, outputFiles: 'dist/face-api.node-cpu.js' }
2021-07-29 09:03:02 STATE: Build for: browserNoBundle type: tfjs: { imports: 1, importBytes: 276, outputBytes: 277, outputFiles: 'dist/tfjs.esm.js' }
2021-07-29 09:03:02 STATE: Build for: browserNoBundle type: esm: { imports: 162, importBytes: 233253, outputBytes: 168850, outputFiles: 'dist/face-api.esm-nobundle.js' }
2021-07-29 09:03:03 STATE: Build for: browserBundle type: tfjs: { modules: 1352, moduleBytes: 4369745, imports: 7, importBytes: 276, outputBytes: 2362345, outputFiles: 'dist/tfjs.esm.js' }
2021-07-29 09:03:03 STATE: Build for: browserBundle type: iife: { imports: 162, importBytes: 2595321, outputBytes: 2482864, outputFiles: 'dist/face-api.js' }
2021-07-29 09:03:04 STATE: Build for: browserBundle type: esm: { imports: 162, importBytes: 2595321, outputBytes: 2360761, outputFiles: 'dist/face-api.esm.js' }
2021-07-29 09:03:23 DATA: GET/2.0 200 text/html; charset=utf-8 1269 /demo/webcam.html ::ffff:192.168.0.200
2021-07-29 09:03:23 DATA: GET/2.0 200 text/javascript; charset=utf-8 7971 /demo/webcam.js ::ffff:192.168.0.200
2021-07-29 09:03:23 DATA: GET/2.0 200 text/javascript; charset=utf-8 2360761 /dist/face-api.esm.js ::ffff:192.168.0.200
2021-07-29 09:03:23 DATA: GET/2.0 200 application/json; charset=utf-8 28233 /model/ssd_mobilenetv1_model-weights_manifest.json ::ffff:192.168.0.200
2021-07-29 09:03:23 DATA: GET/2.0 200 image/x-icon 5063 /favicon.ico ::ffff:192.168.0.200
2021-07-29 09:03:23 DATA: GET/2.0 200 application/octet-stream 5616957 /model/ssd_mobilenetv1_model.bin ::ffff:192.168.0.200
2021-07-29 09:03:24 DATA: GET/2.0 200 application/json; charset=utf-8 8392 /model/age_gender_model-weights_manifest.json ::ffff:192.168.0.200
2021-07-29 09:03:24 DATA: GET/2.0 200 application/octet-stream 429708 /model/age_gender_model.bin ::ffff:192.168.0.200
2021-07-29 09:03:24 DATA: GET/2.0 200 application/json; charset=utf-8 8485 /model/face_landmark_68_model-weights_manifest.json ::ffff:192.168.0.200
2021-07-29 09:03:24 DATA: GET/2.0 200 application/octet-stream 356840 /model/face_landmark_68_model.bin ::ffff:192.168.0.200
2021-07-29 09:03:24 DATA: GET/2.0 200 application/json; charset=utf-8 19615 /model/face_recognition_model-weights_manifest.json ::ffff:192.168.0.200
2021-07-29 09:03:24 DATA: GET/2.0 200 application/octet-stream 6444032 /model/face_recognition_model.bin ::ffff:192.168.0.200
2021-07-29 09:03:25 DATA: GET/2.0 200 application/json; charset=utf-8 6980 /model/face_expression_model-weights_manifest.json ::ffff:192.168.0.200
2021-07-29 09:03:25 DATA: GET/2.0 200 application/octet-stream 329468 /model/face_expression_model.bin ::ffff:192.168.0.200
```js
2022-01-14 09:56:19 INFO: @vladmandic/face-api version 1.6.4
2022-01-14 09:56:19 INFO: User: vlado Platform: linux Arch: x64 Node: v17.2.0
2022-01-14 09:56:19 INFO: Application: { name: '@vladmandic/face-api', version: '1.6.4' }
2022-01-14 09:56:19 INFO: Environment: { profile: 'development', config: '.build.json', package: 'package.json', tsconfig: true, eslintrc: true, git: true }
2022-01-14 09:56:19 INFO: Toolchain: { build: '0.6.7', esbuild: '0.14.11', typescript: '4.5.4', typedoc: '0.22.10', eslint: '8.6.0' }
2022-01-14 09:56:19 INFO: Build: { profile: 'development', steps: [ 'serve', 'watch', 'compile' ] }
2022-01-14 09:56:19 STATE: WebServer: { ssl: false, port: 8000, root: '.' }
2022-01-14 09:56:19 STATE: WebServer: { ssl: true, port: 8001, root: '.', sslKey: 'build/cert/https.key', sslCrt: 'build/cert/https.crt' }
2022-01-14 09:56:19 STATE: Watch: { locations: [ 'src/**', 'README.md', 'src/**', 'src/**' ] }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/cpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 143, outputBytes: 1276 }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/cpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node.js', files: 162, inputBytes: 234787, outputBytes: 175203 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/gpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-gpu.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 147, outputBytes: 1296 }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/gpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-gpu.js', files: 162, inputBytes: 234807, outputBytes: 175219 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/wasm', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-wasm.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 185, outputBytes: 1367 }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/wasm', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-wasm.js', files: 162, inputBytes: 234878, outputBytes: 175294 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/tf-version', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-version.ts', output: 'dist/tfjs.version.js', files: 1, inputBytes: 1063, outputBytes: 1662 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 2, inputBytes: 2172, outputBytes: 811 }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm-nobundle.js', files: 162, inputBytes: 234322, outputBytes: 169437 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 11, inputBytes: 2172, outputBytes: 2444105 }
2022-01-14 09:56:20 STATE: Compile: { name: 'faceapi/browser/iife/bundle', format: 'iife', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.js', files: 162, inputBytes: 2677616, outputBytes: 1252572 }
2022-01-14 09:56:20 STATE: Compile: { name: 'faceapi/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm.js', files: 162, inputBytes: 2677616, outputBytes: 2435063 }
2022-01-14 09:56:20 INFO: Listening...
...
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/html', size: 1047, url: '/', remote: '::1' }
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/javascript', size: 6919, url: '/index.js', remote: '::1' }
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/javascript', size: 2435063, url: '/dist/face-api.esm.js', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 4125244, url: '/dist/face-api.esm.js.map', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 3219, url: '/model/tiny_face_detector_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 193321, url: '/model/tiny_face_detector_model.bin', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 28233, url: '/model/ssd_mobilenetv1_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 5616957, url: '/model/ssd_mobilenetv1_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 8392, url: '/model/age_gender_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 429708, url: '/model/age_gender_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 8485, url: '/model/face_landmark_68_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 356840, url: '/model/face_landmark_68_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 19615, url: '/model/face_recognition_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 6444032, url: '/model/face_recognition_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 6980, url: '/model/face_expression_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 329468, url: '/model/face_expression_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'image/jpeg', size: 144516, url: '/sample1.jpg', remote: '::1' }
```
<br><hr><br>
@ -382,38 +400,41 @@ cd face-api
Then install all dependencies and run rebuild:
```shell
npm install
npm install --production=false
npm run build
```
Build process uses script `build.js` that creates optimized build for each target:
Build process uses `@vladmandic/build` module that creates optimized build for each target:
```text
> @vladmandic/face-api@1.0.2 build
> rimraf dist/* types/* typedoc/* && node server/build.js
```
```js
> @vladmandic/face-api@1.7.1 build /home/vlado/dev/face-api
> node build.js
```json
2021-07-29 09:01:33 INFO: @vladmandic/face-api version 1.3.1
2021-07-29 09:01:33 INFO: User: vlado Platform: linux Arch: x64 Node: v16.5.0
2021-07-29 09:01:33 INFO: Toolchain: tfjs: 3.8.0 esbuild 0.12.16; typescript 4.3.5; typedoc: 0.21.4 eslint: 7.31.0
2021-07-29 09:01:33 INFO: Build: file startup all target: es2018
2021-07-29 09:01:33 STATE: Build for: node type: tfjs: { imports: 1, importBytes: 143, outputBytes: 1327, outputFiles: 'dist/tfjs.esm.js' }
2021-07-29 09:01:33 STATE: Build for: node type: node: { imports: 162, importBytes: 234303, outputBytes: 175090, outputFiles: 'dist/face-api.node.js' }
2021-07-29 09:01:33 STATE: Build for: nodeGPU type: tfjs: { imports: 1, importBytes: 147, outputBytes: 1335, outputFiles: 'dist/tfjs.esm.js' }
2021-07-29 09:01:33 STATE: Build for: nodeGPU type: node: { imports: 162, importBytes: 234311, outputBytes: 175098, outputFiles: 'dist/face-api.node-gpu.js' }
2021-07-29 09:01:33 STATE: Build for: nodeCPU type: tfjs: { imports: 1, importBytes: 138, outputBytes: 1326, outputFiles: 'dist/tfjs.esm.js' }
2021-07-29 09:01:33 STATE: Build for: nodeCPU type: node: { imports: 162, importBytes: 234302, outputBytes: 175089, outputFiles: 'dist/face-api.node-cpu.js' }
2021-07-29 09:01:33 STATE: Build for: browserNoBundle type: tfjs: { imports: 1, importBytes: 276, outputBytes: 277, outputFiles: 'dist/tfjs.esm.js' }
2021-07-29 09:01:33 STATE: Build for: browserNoBundle type: esm: { imports: 162, importBytes: 233253, outputBytes: 168850, outputFiles: 'dist/face-api.esm-nobundle.js' }
2021-07-29 09:01:33 STATE: Build for: browserBundle type: tfjs: { modules: 1352, moduleBytes: 4369745, imports: 7, importBytes: 276, outputBytes: 2362345, outputFiles: 'dist/tfjs.esm.js' }
2021-07-29 09:01:34 STATE: Build for: browserBundle type: iife: { imports: 162, importBytes: 2595321, outputBytes: 2482864, outputFiles: 'dist/face-api.js' }
2021-07-29 09:01:35 STATE: Build for: browserBundle type: esm: { imports: 162, importBytes: 2595321, outputBytes: 2360761, outputFiles: 'dist/face-api.esm.js' }
2021-07-29 09:01:35 INFO: Running Linter: [ 'server/', 'demo/', 'src/', 'test/', [length]: 4 ]
2021-07-29 09:01:55 INFO: Linter complete: files: 183 errors: 0 warnings: 0
2021-07-29 09:01:55 INFO: Compile typings: [ 'src/index.ts', [length]: 1 ]
2021-07-29 09:01:59 INFO: Update Change log: [ '/home/vlado/dev/face-api/CHANGELOG.md', [length]: 1 ]
2021-07-29 09:01:59 INFO: Generate TypeDocs: [ 'src/index.ts', [length]: 1 ]
2022-07-25 08:21:05 INFO: Application: { name: '@vladmandic/face-api', version: '1.7.1' }
2022-07-25 08:21:05 INFO: Environment: { profile: 'production', config: '.build.json', package: 'package.json', tsconfig: true, eslintrc: true, git: true }
2022-07-25 08:21:05 INFO: Toolchain: { build: '0.7.7', esbuild: '0.14.50', typescript: '4.7.4', typedoc: '0.23.9', eslint: '8.20.0' }
2022-07-25 08:21:05 INFO: Build: { profile: 'production', steps: [ 'clean', 'compile', 'typings', 'typedoc', 'lint', 'changelog' ] }
2022-07-25 08:21:05 STATE: Clean: { locations: [ 'dist/*', 'typedoc/*', 'types/lib/src' ] }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/cpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 143, outputBytes: 614 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/cpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node.js', files: 162, inputBytes: 234137, outputBytes: 85701 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/gpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-gpu.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 147, outputBytes: 618 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/gpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-gpu.js', files: 162, inputBytes: 234141, outputBytes: 85705 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/wasm', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-wasm.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 185, outputBytes: 670 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/wasm', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-wasm.js', files: 162, inputBytes: 234193, outputBytes: 85755 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/tf-version', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-version.ts', output: 'dist/tfjs.version.js', files: 1, inputBytes: 1063, outputBytes: 400 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 2, inputBytes: 910, outputBytes: 527 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm-nobundle.js', files: 162, inputBytes: 234050, outputBytes: 82787 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 11, inputBytes: 910, outputBytes: 1184871 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/iife/bundle', format: 'iife', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.js', files: 162, inputBytes: 1418394, outputBytes: 1264631 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm.js', files: 162, inputBytes: 1418394, outputBytes: 1264150 }
2022-07-25 08:21:07 STATE: Typings: { input: 'src/index.ts', output: 'types/lib', files: 93 }
2022-07-25 08:21:09 STATE: TypeDoc: { input: 'src/index.ts', output: 'typedoc', objects: 154, generated: true }
2022-07-25 08:21:13 STATE: Lint: { locations: [ 'src/' ], files: 174, errors: 0, warnings: 0 }
2022-07-25 08:21:14 STATE: ChangeLog: { repository: 'https://github.com/vladmandic/face-api', branch: 'master', output: 'CHANGELOG.md' }
2022-07-25 08:21:14 INFO: Done...
2022-07-25 08:21:14 STATE: Copy: { input: 'types/lib/dist/tfjs.esm.d.ts' }
2022-07-25 08:21:15 STATE: API-Extractor: { succeeeded: true, errors: 0, warnings: 417 }
2022-07-25 08:21:15 INFO: FaceAPI Build complete...
```
<br><hr><br>
@ -428,18 +449,14 @@ Build process uses script `build.js` that creates optimized build for each targe
## Note
This is updated **face-api.js** with latest available TensorFlow/JS as the original is not compatible with **tfjs 2.0+**.
This is updated **face-api.js** with latest available TensorFlow/JS as the original is not compatible with **tfjs >=2.0**.
Forked from [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2** which was released on March 22nd, 2020
Currently based on **`TensorFlow/JS` 3.6.1**
*Why?* I needed a FaceAPI that does not cause version conflict with newer versions of TensorFlow
And since the original FaceAPI was open-source, I've released this version as well
*Why?* I needed FaceAPI that does not cause version conflict with newer versions of TensorFlow
And since original FaceAPI was open-source, I've released this version as well
Changes ended up being too large for a simple pull request
and it ended up being a full-fledged version on its own
Plus many features were added since original inception
Changes ended up being too large for a simple pull request and it ended up being a full-fledged version on its own
Plus many features were added since the original inception
Although a lot of work has gone into this version of `FaceAPI` and it will continue to be maintained,
at this time it is completely superseded by my newer library `Human` which covers the same use cases,
@ -454,14 +471,18 @@ but extends it with newer AI models, additional detection details, compatibility
Compared to [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2**:
- Compatible with `TensorFlow/JS 2.0+ & 3.0+`
- Compatible with `TensorFlow/JS 2.0+, 3.0+ and 4.0+`
Currently using **`TensorFlow/JS` 4.16**
Original `face-api.js` is based on `TFJS` **1.7.4**
- Compatible with `WebGL`, `CPU` and `WASM` TFJS Browser backends
- Compatible with both `tfjs-node` and `tfjs-node-gpu` TFJS NodeJS backends
- Updated all type castings for TypeScript type checking to `TypeScript 4.3`
- Updated all type castings for TypeScript type checking to `TypeScript 5.3`
- Switched bundling from `UMD` to `ESM` + `CommonJS` with fallback to `IIFE`
Resulting code is optimized per-platform instead of being universal
Fully tree shakable when imported as an `ESM` module
Browser bundle process uses `ESBuild` instead of `Rollup`
- Added separate `face-api` versions with `tfjs` pre-bundled and without `tfjs`
When using `-nobundle` version, user can load any version of `tfjs` manually
- Typescript build process now targets `ES2018` and instead of dual `ES5`/`ES6`
Resulting code is clean ES2018 JavaScript without polyfills
- Removed old tests, docs, examples
@ -477,6 +498,7 @@ Compared to [face-api.js](https://github.com/justadudewhohacks/face-api.js) vers
- Added `face angle` calculations that returns `roll`, `yaw` and `pitch`
- Added `typdoc` automatic API specification generation during build
- Added `changelog` automatic generation during build
- New process to generate **TypeDocs** bundle using API-Extractor
<br>

38
api-extractor.json Normal file
View File

@ -0,0 +1,38 @@
{
"$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json",
"mainEntryPointFilePath": "types/lib/src/index.d.ts",
"bundledPackages": ["@tensorflow/tfjs-core", "@tensorflow/tfjs-converter", "@types/offscreencanvas"],
"compiler": {
"skipLibCheck": false
},
"newlineKind": "lf",
"dtsRollup": {
"enabled": true,
"untrimmedFilePath": "types/face-api.d.ts"
},
"docModel": { "enabled": false },
"tsdocMetadata": {
"enabled": false
},
"apiReport": { "enabled": false },
"messages": {
"compilerMessageReporting": {
"default": {
"logLevel": "warning"
}
},
"extractorMessageReporting": {
"default": {
"logLevel": "warning"
},
"ae-missing-release-tag": {
"logLevel": "none"
}
},
"tsdocMessageReporting": {
"default": {
"logLevel": "warning"
}
}
}
}

77
build.js Normal file
View File

@ -0,0 +1,77 @@
const fs = require('fs');
const log = require('@vladmandic/pilogger');
const Build = require('@vladmandic/build').Build;
const APIExtractor = require('@microsoft/api-extractor');
const regEx = [
{ search: 'types="@webgpu/types/dist"', replace: 'path="../src/types/webgpu.d.ts"' },
{ search: 'types="offscreencanvas"', replace: 'path="../src/types/offscreencanvas.d.ts"' },
];
function copyFile(src, dst) {
if (!fs.existsSync(src)) {
log.warn('Copy:', { input: src, output: dst });
return;
}
log.state('Copy:', { input: src, output: dst });
const buffer = fs.readFileSync(src);
fs.writeFileSync(dst, buffer);
}
function writeFile(str, dst) {
log.state('Write:', { output: dst });
fs.writeFileSync(dst, str);
}
function regExFile(src, entries) {
if (!fs.existsSync(src)) {
log.warn('Filter:', { src });
return;
}
log.state('Filter:', { input: src });
for (const entry of entries) {
const buffer = fs.readFileSync(src, 'UTF-8');
const lines = buffer.split(/\r?\n/);
const out = [];
for (const line of lines) {
if (line.includes(entry.search)) out.push(line.replace(entry.search, entry.replace));
else out.push(line);
}
fs.writeFileSync(src, out.join('\n'));
}
}
const apiIgnoreList = ['ae-forgotten-export', 'ae-unresolved-link', 'tsdoc-param-tag-missing-hyphen'];
async function main() {
// run production build
const build = new Build();
await build.run('production');
// patch tfjs typedefs
log.state('Copy:', { input: 'types/lib/dist/tfjs.esm.d.ts' });
copyFile('types/lib/dist/tfjs.esm.d.ts', 'dist/tfjs.esm.d.ts');
// run api-extractor to create typedef rollup
const extractorConfig = APIExtractor.ExtractorConfig.loadFileAndPrepare('api-extractor.json');
const extractorResult = APIExtractor.Extractor.invoke(extractorConfig, {
localBuild: true,
showVerboseMessages: false,
messageCallback: (msg) => {
msg.handled = true;
if (msg.logLevel === 'none' || msg.logLevel === 'verbose' || msg.logLevel === 'info') return;
if (msg.sourceFilePath?.includes('/node_modules/')) return;
if (apiIgnoreList.reduce((prev, curr) => prev || msg.messageId.includes(curr), false)) return;
log.data('API', { level: msg.logLevel, category: msg.category, id: msg.messageId, file: msg.sourceFilePath, line: msg.sourceFileLine, text: msg.text });
},
});
log.state('API-Extractor:', { succeeeded: extractorResult.succeeded, errors: extractorResult.errorCount, warnings: extractorResult.warningCount });
regExFile('types/face-api.d.ts', regEx);
writeFile('export * from \'../types/face-api\';', 'dist/face-api.esm-nobundle.d.ts');
writeFile('export * from \'../types/face-api\';', 'dist/face-api.esm.d.ts');
writeFile('export * from \'../types/face-api\';', 'dist/face-api.d.ts');
writeFile('export * from \'../types/face-api\';', 'dist/face-api.node.d.ts');
writeFile('export * from \'../types/face-api\';', 'dist/face-api.node-gpu.d.ts');
writeFile('export * from \'../types/face-api\';', 'dist/face-api.node-wasm.d.ts');
log.info('FaceAPI Build complete...');
}
main();

View File

@ -11,7 +11,7 @@
<link rel="shortcut icon" href="../favicon.ico" type="image/x-icon">
<script src="./index.js" type="module"></script>
</head>
<body style="font-family: monospace; background: black; color: white; font-size: 16px; line-height: 22px; margin: 0;">
<body style="font-family: monospace; background: black; color: white; font-size: 16px; line-height: 22px; margin: 0; overflow-x: hidden;">
<div id="log"></div>
</body>
</html>

View File

@ -1,25 +1,25 @@
import * as faceapi from '../dist/face-api.esm.js';
/**
* FaceAPI Demo for Browsers
* Loaded via `index.html`
*/
import * as faceapi from '../dist/face-api.esm.js'; // use when in dev mode
// import * as faceapi from '@vladmandic/face-api'; // use when downloading face-api as npm
// configuration options
const modelPath = '../model/'; // path to model folder that will be loaded using http
// const modelPath = 'https://vladmandic.github.io/face-api/model/'; // path to model folder that will be loaded using http
// const modelPath = 'https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model/'; // path to model folder that will be loaded using http
const imgSize = 800; // maximum image size in pixels
const minScore = 0.3; // minimum score
const maxResults = 10; // maximum number of results to return
const samples = ['sample1.jpg', 'sample2.jpg', 'sample3.jpg', 'sample4.jpg', 'sample5.jpg', 'sample6.jpg']; // sample images to be loaded using http
// helper function to pretty-print json object to string
function str(json) {
let text = '<font color="lightblue">';
text += json ? JSON.stringify(json).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ') : '';
text += '</font>';
return text;
}
const str = (json) => (json ? JSON.stringify(json).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ') : '');
// helper function to print strings to html document as a log
function log(...txt) {
// eslint-disable-next-line no-console
console.log(...txt);
console.log(...txt); // eslint-disable-line no-console
const div = document.getElementById('log');
if (div) div.innerHTML += `<br>${txt}`;
}
@ -33,11 +33,9 @@ function faces(name, title, id, data) {
canvas.style.position = 'absolute';
canvas.style.left = `${img.offsetLeft}px`;
canvas.style.top = `${img.offsetTop}px`;
// @ts-ignore
canvas.width = img.width;
// @ts-ignore
canvas.height = img.height;
const ctx = canvas.getContext('2d');
const ctx = canvas.getContext('2d', { willReadFrequently: true });
if (!ctx) return;
// draw title
ctx.font = '1rem sans-serif';
@ -53,6 +51,7 @@ function faces(name, title, id, data) {
ctx.beginPath();
ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height);
ctx.stroke();
// draw text labels
ctx.globalAlpha = 1;
ctx.fillText(`${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 18);
ctx.fillText(`${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 2);
@ -72,8 +71,7 @@ function faces(name, title, id, data) {
// helper function to draw processed image and its results
function print(title, img, data) {
// eslint-disable-next-line no-console
console.log('Results:', title, img, data);
console.log('Results:', title, img, data); // eslint-disable-line no-console
const el = new Image();
el.id = Math.floor(Math.random() * 100000).toString();
el.src = img;
@ -96,7 +94,7 @@ async function image(url) {
const canvas = document.createElement('canvas');
canvas.height = img.height;
canvas.width = img.width;
const ctx = canvas.getContext('2d');
const ctx = canvas.getContext('2d', { willReadFrequently: true });
if (ctx) ctx.drawImage(img, 0, 0, img.width, img.height);
// return generated canvas to be used by tfjs during detection
resolve(canvas);
@ -111,18 +109,23 @@ async function main() {
log('FaceAPI Test');
// if you want to use wasm backend location for wasm binaries must be specified
// await faceapi.tf.setWasmPaths('../node_modules/@tensorflow/tfjs-backend-wasm/dist/');
// await faceapi.tf.setBackend('wasm');
// await faceapi.tf?.setWasmPaths(`https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${faceapi.tf.version_core}/dist/`);
// await faceapi.tf?.setBackend('wasm');
// log(`WASM SIMD: ${await faceapi.tf?.env().getAsync('WASM_HAS_SIMD_SUPPORT')} Threads: ${await faceapi.tf?.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT') ? 'Multi' : 'Single'}`);
// default is webgl backend
await faceapi.tf.setBackend('webgl');
await faceapi.tf.ready();
// tfjs optimizations
if (faceapi.tf?.env().flagRegistry.CANVAS2D_WILL_READ_FREQUENTLY) faceapi.tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready();
// check version
log(`Version: FaceAPI ${str(faceapi?.version.faceapi || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`);
log(`Version: FaceAPI ${str(faceapi?.version || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`);
log(`Flags: ${JSON.stringify(faceapi?.tf?.ENV.flags || { tf: 'not loaded' })}`);
// load face-api models
@ -140,16 +143,9 @@ async function main() {
const engine = await faceapi.tf.engine();
log(`TF Engine State: ${str(engine.state)}`);
// const testT = faceapi.tf.tensor([0]);
// const testF = testT.toFloat();
// console.log(testT.print(), testF.print());
// testT.dispose();
// testF.dispose();
// loop through all images and try to process them
log(`Start processing: ${samples.length} images ...<br>`);
for (const img of samples) {
// new line
document.body.appendChild(document.createElement('br'));
// load and resize image
const canvas = await image(img);
@ -163,7 +159,7 @@ async function main() {
.withFaceDescriptors()
.withAgeAndGender();
// print results to screen
print('TinyFace Detector', img, dataTinyYolo);
print('TinyFace:', img, dataTinyYolo);
// actual model execution
const dataSSDMobileNet = await faceapi
.detectAllFaces(canvas, optionsSSDMobileNet)
@ -172,11 +168,9 @@ async function main() {
.withFaceDescriptors()
.withAgeAndGender();
// print results to screen
print('SSD MobileNet', img, dataSSDMobileNet);
print('SSDMobileNet:', img, dataSSDMobileNet);
} catch (err) {
log(`Image: ${img} Error during processing ${str(err)}`);
// eslint-disable-next-line no-console
console.error(err);
}
}
}

View File

@ -1,15 +1,20 @@
// @ts-nocheck
/**
* FaceAPI Demo for NodeJS
* - Uses external library [canvas](https://www.npmjs.com/package/canvas) to decode image
* - Loads image from provided param
* - Outputs results to console
*/
// canvas library provides full canvas (load/draw/write) functionality for nodejs
// must be installed manually as it just a demo dependency and not actual face-api dependency
const canvas = require('canvas'); // eslint-disable-line node/no-missing-require
const fs = require('fs');
const process = require('process');
const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const process = require('process');
const log = require('@vladmandic/pilogger');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require, no-unused-vars, @typescript-eslint/no-unused-vars
const tf = require('@tensorflow/tfjs-node');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const canvas = require('canvas');
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
const modelPathRoot = '../model';
const imgPathRoot = './demo'; // modify to include your sample images
@ -52,11 +57,9 @@ async function main() {
faceapi.env.monkeyPatch({ Canvas: canvas.Canvas, Image: canvas.Image, ImageData: canvas.ImageData });
await faceapi.tf.setBackend('tensorflow');
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready();
log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf?.getBackend()}`);
log.state(`Version: FaceAPI ${faceapi.version} TensorFlow/JS ${tf.version_core} Backend: ${faceapi.tf?.getBackend()}`);
log.info('Loading FaceAPI models');
const modelPath = path.join(__dirname, modelPathRoot);
@ -80,7 +83,7 @@ async function main() {
for (const face of result) print(face);
}
const t1 = process.hrtime.bigint();
log.info('Processed', numImages, 'images in', Math.trunc(parseInt(t1 - t0) / 1000 / 1000), 'ms');
log.info('Processed', numImages, 'images in', Math.trunc(Number((t1 - t0).toString()) / 1000 / 1000), 'ms');
} else {
const param = process.argv[2];
if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {

35
demo/node-face-compare.js Normal file
View File

@ -0,0 +1,35 @@
/**
 * FaceAPI demo that loads two images and finds the similarity between the most prominent face in each image
*/
const fs = require('fs');
const tf = require('@tensorflow/tfjs-node');
const faceapi = require('../dist/face-api.node');
let optionsSSDMobileNet;
const getDescriptors = async (imageFile) => {
const buffer = fs.readFileSync(imageFile);
const tensor = tf.node.decodeImage(buffer, 3);
const faces = await faceapi.detectAllFaces(tensor, optionsSSDMobileNet)
.withFaceLandmarks()
.withFaceDescriptors();
tf.dispose(tensor);
return faces.map((face) => face.descriptor);
};
const main = async (file1, file2) => {
console.log('input images:', file1, file2); // eslint-disable-line no-console
await tf.ready();
await faceapi.nets.ssdMobilenetv1.loadFromDisk('model');
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.5, maxResults: 1 });
await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
await faceapi.nets.faceRecognitionNet.loadFromDisk('model');
const desc1 = await getDescriptors(file1);
const desc2 = await getDescriptors(file2);
const distance = faceapi.euclideanDistance(desc1[0], desc2[0]); // only compare first found face in each image
console.log('distance between most prominent detected faces:', distance); // eslint-disable-line no-console
console.log('similarity between most prominent detected faces:', 1 - distance); // eslint-disable-line no-console
};
main('demo/sample1.jpg', 'demo/sample2.jpg');
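For reference, a minimal sketch of turning the distance computed above into a yes/no decision; the 0.6 cutoff is an assumed heuristic, not a threshold this demo defines:
const isSameFace = (a, b, threshold = 0.6) => { // a and b are 128-value face descriptors
  const distance = faceapi.euclideanDistance(a, b); // lower distance means more similar faces
  return { distance, similarity: 1 - distance, match: distance < threshold };
};
// e.g. console.log(isSameFace(desc1[0], desc2[0])); // eslint-disable-line no-console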

View File

@ -1,11 +1,18 @@
/**
* FaceAPI Demo for NodeJS
* - Uses external library [@canvas/image](https://www.npmjs.com/package/@canvas/image) to decode image
* - Loads image from provided param
* - Outputs results to console
*/
// @canvas/image can decode jpeg, png, webp
// must be installed manually as it is just a demo dependency and not an actual face-api dependency
const image = require('@canvas/image'); // eslint-disable-line node/no-missing-require
const fs = require('fs');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const tf = require('@tensorflow/tfjs-node');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const image = require('@canvas/image'); // @canvas/image can decode jpeg, png, webp
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger');
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
const modelPath = 'model/';
const imageFile = 'demo/sample1.jpg';
@ -41,7 +48,7 @@ async function main() {
.withFaceExpressions()
.withFaceDescriptors()
.withAgeAndGender();
log.data('results:', result);
log.data('results:', result.length);
}
main();

84
demo/node-match.js Normal file
View File

@ -0,0 +1,84 @@
/**
* FaceAPI Demo for NodeJS
* - Analyzes face descriptors from source (image file or folder containing multiple image files)
* - Analyzes face descriptor from target
* - Finds best match
*/
const fs = require('fs');
const path = require('path');
const log = require('@vladmandic/pilogger');
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
let optionsSSDMobileNet;
const minConfidence = 0.1;
const distanceThreshold = 0.5;
const modelPath = 'model';
const labeledFaceDescriptors = [];
async function initFaceAPI() {
await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults: 1 });
}
async function getDescriptors(imageFile) {
const buffer = fs.readFileSync(imageFile);
const tensor = tf.node.decodeImage(buffer, 3);
const faces = await faceapi.detectAllFaces(tensor, optionsSSDMobileNet)
.withFaceLandmarks()
.withFaceExpressions()
.withFaceDescriptors();
tf.dispose(tensor);
return faces.map((face) => face.descriptor);
}
async function registerImage(inputFile) {
if (!inputFile.toLowerCase().endsWith('jpg') && !inputFile.toLowerCase().endsWith('png') && !inputFile.toLowerCase().endsWith('gif')) return;
log.data('Registered:', inputFile);
const descriptors = await getDescriptors(inputFile);
for (const descriptor of descriptors) {
const labeledFaceDescriptor = new faceapi.LabeledFaceDescriptors(inputFile, [descriptor]);
labeledFaceDescriptors.push(labeledFaceDescriptor);
}
}
async function findBestMatch(inputFile) {
const matcher = new faceapi.FaceMatcher(labeledFaceDescriptors, distanceThreshold);
const descriptors = await getDescriptors(inputFile);
const matches = [];
for (const descriptor of descriptors) {
const match = await matcher.findBestMatch(descriptor);
matches.push(match);
}
return matches;
}
async function main() {
log.header();
if (process.argv.length !== 4) {
log.error(process.argv[1], 'Expected <source image or folder> <target image>');
process.exit(1);
}
await initFaceAPI();
log.info('Input:', process.argv[2]);
if (fs.statSync(process.argv[2]).isFile()) {
await registerImage(process.argv[2]); // register image
} else if (fs.statSync(process.argv[2]).isDirectory()) {
const dir = fs.readdirSync(process.argv[2]);
for (const f of dir) await registerImage(path.join(process.argv[2], f)); // register all images in a folder
}
log.info('Comparing:', process.argv[3], 'Descriptors:', labeledFaceDescriptors.length);
if (labeledFaceDescriptors.length > 0) {
const bestMatch = await findBestMatch(process.argv[3]); // find best match to all registered images
log.data('Match:', bestMatch);
} else {
log.warn('No registered faces');
}
}
main();
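For reference, a small sketch of condensing the matches logged above; it assumes each entry returned by findBestMatch is a FaceMatch exposing label and distance, which is how the matcher reports its results:
const summarizeMatches = (matches) => matches.map((m) => ({ label: m.label, distance: Math.round(100 * m.distance) / 100 }));
// e.g. log.data('Match summary:', summarizeMatches(bestMatch)); // inside main(), after findBestMatch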

View File

@ -1,14 +1,16 @@
// @ts-nocheck
/**
* FaceAPI Demo for NodeJS
* - Used by `node-multiprocess.js`
*/
const fs = require('fs');
const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger');
// workers actually import the tfjs and faceapi modules
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const tf = require('@tensorflow/tfjs-node');
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
// options used by faceapi
const modelPathRoot = '../model';
@ -53,7 +55,7 @@ async function main() {
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready();
log.state('Worker: PID:', process.pid, `TensorFlow/JS ${faceapi.tf.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf.getBackend()}`);
log.state('Worker: PID:', process.pid, `TensorFlow/JS ${faceapi.tf.version_core} FaceAPI ${faceapi.version} Backend: ${faceapi.tf.getBackend()}`);
// and load and initialize faceapi models
const modelPath = path.join(__dirname, modelPathRoot);

View File

@ -1,11 +1,14 @@
// @ts-nocheck
/**
* FaceAPI Demo for NodeJS
* - Starts multiple worker processes and uses them as worker pool to process all input images
* - Images are enumerated in main process and sent for processing to worker processes via ipc
*/
const fs = require('fs');
const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger'); // this is my simple logger with a few extra features
const child_process = require('child_process');
// note that main process import faceapi or tfjs at all
// note that main process does not need to import faceapi or tfjs at all as processing is done in a worker process
const imgPathRoot = './demo'; // modify to include your sample images
const numWorkers = 4; // how many workers will be started
@ -33,14 +36,14 @@ function waitCompletion() {
if (activeWorkers > 0) setImmediate(() => waitCompletion());
else {
t[1] = process.hrtime.bigint();
log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(parseInt(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(parseInt(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(parseInt(t[1] - t[2]) / numImages / 1000000), 'ms');
log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(Number(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(Number(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(Number(t[1] - t[2]) / numImages / 1000000), 'ms');
}
}
function measureLatency() {
t[3] = process.hrtime.bigint();
const latencyInitialization = Math.trunc(parseInt(t[2] - t[0]) / 1000 / 1000);
const latencyRoundTrip = Math.trunc(parseInt(t[3] - t[2]) / 1000 / 1000);
const latencyInitialization = Math.trunc(Number(t[2] - t[0]) / 1000 / 1000);
const latencyRoundTrip = Math.trunc(Number(t[3] - t[2]) / 1000 / 1000);
log.info('Latency: worker initialization: ', latencyInitialization, 'message round trip:', latencyRoundTrip);
}

31
demo/node-simple.js Normal file
View File

@ -0,0 +1,31 @@
/**
* FaceAPI Demo for NodeJS
* - Loads image
* - Outputs results to console
*/
const fs = require('fs');
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
async function main() {
await faceapi.nets.ssdMobilenetv1.loadFromDisk('model'); // load models from a specific path
await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
await faceapi.nets.ageGenderNet.loadFromDisk('model');
await faceapi.nets.faceRecognitionNet.loadFromDisk('model');
await faceapi.nets.faceExpressionNet.loadFromDisk('model');
const options = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.1, maxResults: 10 }); // set model options
const buffer = fs.readFileSync('demo/sample1.jpg'); // load jpg image as binary
const decodeT = faceapi.tf.node.decodeImage(buffer, 3); // decode binary buffer to rgb tensor
const expandT = faceapi.tf.expandDims(decodeT, 0); // add batch dimension to tensor
const result = await faceapi.detectAllFaces(expandT, options) // run detection
.withFaceLandmarks()
.withFaceExpressions()
.withFaceDescriptors()
.withAgeAndGender();
faceapi.tf.dispose([decodeT, expandT]); // dispose tensors to avoid memory leaks
console.log({ result }); // eslint-disable-line no-console
}
main();
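A minimal sketch of condensing the result instead of dumping the full objects (this would live inside main(), after detection); field names follow the print helpers used in the other demos here, and detection.score is assumed to be the public accessor for the detection confidence:
const summary = result.map((face) => ({
  score: Math.round(100 * face.detection.score), // detection confidence in percent
  gender: face.gender,
  age: Math.round(face.age),
}));
console.log({ summary }); // eslint-disable-line no-console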

View File

@ -1,97 +1,53 @@
// @ts-nocheck
/**
* FaceAPI Demo for NodeJS using WASM
* - Loads WASM binaries from external CDN
* - Loads image
* - Outputs results to console
*/
const fs = require('fs');
const process = require('process');
const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require, no-unused-vars, @typescript-eslint/no-unused-vars
const image = require('@canvas/image'); // eslint-disable-line node/no-missing-require
const tf = require('@tensorflow/tfjs');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
require('@tensorflow/tfjs-backend-wasm');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require, no-unused-vars
const canvas = require('canvas');
const faceapi = require('../dist/face-api.node-cpu.js'); // this is equivalent to '@vladmandic/faceapi'
const wasm = require('@tensorflow/tfjs-backend-wasm');
const faceapi = require('../dist/face-api.node-wasm.js'); // use this when using face-api in dev mode
const modelPathRoot = '../model';
const imgPathRoot = './demo'; // modify to include your sample images
const minConfidence = 0.15;
const maxResults = 5;
let optionsSSDMobileNet;
async function image(input) {
const img = await canvas.loadImage(input);
const c = canvas.createCanvas(img.width, img.height);
const ctx = c.getContext('2d');
ctx.drawImage(img, 0, 0, img.width, img.height);
// const out = fs.createWriteStream('test.jpg');
// const stream = c.createJPEGStream({ quality: 0.6, progressive: true, chromaSubsampling: true });
// stream.pipe(out);
return c;
async function readImage(imageFile) {
const buffer = fs.readFileSync(imageFile); // read image from disk
const canvas = await image.imageFromBuffer(buffer); // decode to canvas
const imageData = image.getImageData(canvas); // read decoded image data from canvas
const tensor = tf.tidy(() => { // create tensor from image data
const data = tf.tensor(Array.from(imageData?.data || []), [canvas.height, canvas.width, 4], 'int32'); // create rgba int32 tensor from the flat pixel array, shaped [height, width, 4]
const channels = tf.split(data, 4, 2); // split rgba to channels
const rgb = tf.stack([channels[0], channels[1], channels[2]], 2); // stack channels back to rgb
const squeeze = tf.squeeze(rgb); // drop the trailing singleton dimension left by stack, leaving an rgb tensor of [height, width, 3]
return squeeze;
});
console.log(`Image: ${imageFile} [${canvas.width} x ${canvas.height}] Tensor: ${tensor.shape}, Size: ${tensor.size}`); // eslint-disable-line no-console
return tensor;
}
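// editorial note on shapes in the tidy() block above: data is [height, width, 4] rgba; split along axis 2 yields four [height, width, 1] channel tensors;
// stacking the first three along a new axis gives [height, width, 3, 1]; squeeze then drops the trailing singleton, leaving an rgb tensor of [height, width, 3]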
async function detect(tensor) {
const result = await faceapi
.detectAllFaces(tensor, optionsSSDMobileNet)
async function main() {
wasm.setWasmPaths('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/', true);
await tf.setBackend('wasm');
await tf.ready();
console.log(`Version: FaceAPI ${faceapi.version} TensorFlow/JS ${tf.version_core} Backend: ${faceapi.tf.getBackend()}`); // eslint-disable-line no-console
await faceapi.nets.ssdMobilenetv1.loadFromDisk('model'); // load models from a specific path
await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
await faceapi.nets.ageGenderNet.loadFromDisk('model');
await faceapi.nets.faceRecognitionNet.loadFromDisk('model');
await faceapi.nets.faceExpressionNet.loadFromDisk('model');
const options = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.1, maxResults: 10 }); // set model options
const tensor = await readImage('demo/sample1.jpg');
const t0 = performance.now();
const result = await faceapi.detectAllFaces(tensor, options) // run detection
.withFaceLandmarks()
.withFaceExpressions()
.withFaceDescriptors()
.withAgeAndGender();
return result;
}
function print(face) {
const expression = Object.entries(face.expressions).reduce((acc, val) => ((val[1] > acc[1]) ? val : acc), ['', 0]);
const box = [face.alignedRect._box._x, face.alignedRect._box._y, face.alignedRect._box._width, face.alignedRect._box._height];
const gender = `Gender: ${Math.round(100 * face.genderProbability)}% ${face.gender}`;
log.data(`Detection confidence: ${Math.round(100 * face.detection._score)}% ${gender} Age: ${Math.round(10 * face.age) / 10} Expression: ${Math.round(100 * expression[1])}% ${expression[0]} Box: ${box.map((a) => Math.round(a))}`);
}
async function main() {
log.header();
log.info('FaceAPI single-process test');
faceapi.env.monkeyPatch({ Canvas: canvas.Canvas, Image: canvas.Image, ImageData: canvas.ImageData });
await faceapi.tf.setBackend('wasm');
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready();
log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf?.getBackend()}`);
log.info('Loading FaceAPI models');
const modelPath = path.join(__dirname, modelPathRoot);
await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
await faceapi.nets.ageGenderNet.loadFromDisk(modelPath);
await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults });
if (process.argv.length !== 3) {
const t0 = process.hrtime.bigint();
const dir = fs.readdirSync(imgPathRoot);
let numImages = 0;
for (const img of dir) {
if (!img.toLocaleLowerCase().endsWith('.jpg')) continue;
numImages += 1;
const c = await image(path.join(imgPathRoot, img));
const result = await detect(c);
log.data('Image:', img, 'Detected faces:', result.length);
for (const face of result) print(face);
}
const t1 = process.hrtime.bigint();
log.info('Processed', numImages, 'images in', Math.trunc(parseInt(t1 - t0) / 1000 / 1000), 'ms');
} else {
const param = process.argv[2];
if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {
const c = await image(param);
const result = await detect(c);
log.data('Image:', param, 'Detected faces:', result.length);
for (const face of result) print(face);
}
}
tf.dispose(tensor); // dispose tensors to avoid memory leaks
const t1 = performance.now();
console.log('Time', t1 - t0); // eslint-disable-line no-console
console.log('Result', result); // eslint-disable-line no-console
}
main();

View File

@ -1,21 +1,25 @@
// @ts-nocheck
/**
* FaceAPI Demo for NodeJS
* - Uses external library [node-fetch](https://www.npmjs.com/package/node-fetch) to load images via http
* - Loads image from provided param
* - Outputs results to console
*/
const fs = require('fs');
const process = require('process');
const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const fetch = require('node-fetch').default;
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const tf = require('@tensorflow/tfjs-node');
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
const modelPathRoot = '../model';
const imgPathRoot = './demo'; // modify to include your sample images
const minConfidence = 0.15;
const maxResults = 5;
let optionsSSDMobileNet;
let fetch; // dynamically imported later
async function image(input) {
// read input image file and create tensor to be used for processing
@ -89,12 +93,13 @@ async function main() {
log.header();
log.info('FaceAPI single-process test');
// eslint-disable-next-line node/no-extraneous-import
fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-missing-import
await faceapi.tf.setBackend('tensorflow');
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready();
log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf?.getBackend()}`);
log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version} Backend: ${faceapi.tf?.getBackend()}`);
log.info('Loading FaceAPI models');
const modelPath = path.join(__dirname, modelPathRoot);
@ -105,7 +110,7 @@ async function main() {
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults });
if (process.argv.length !== 3) {
if (process.argv.length !== 4) {
const t0 = process.hrtime.bigint();
const dir = fs.readdirSync(imgPathRoot);
for (const img of dir) {
@ -117,7 +122,7 @@ async function main() {
tensor.dispose();
}
const t1 = process.hrtime.bigint();
log.info('Processed', dir.length, 'images in', Math.trunc(parseInt(t1 - t0) / 1000 / 1000), 'ms');
log.info('Processed', dir.length, 'images in', Math.trunc(Number((t1 - t0)) / 1000 / 1000), 'ms');
} else {
const param = process.argv[2];
if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {

View File

@ -1,8 +1,14 @@
import * as faceapi from '../dist/face-api.esm.js';
/**
* FaceAPI Demo for Browsers
* Loaded via `webcam.html`
*/
import * as faceapi from '../dist/face-api.esm.js'; // use when in dev mode
// import * as faceapi from '@vladmandic/face-api'; // use when downloading face-api as npm
// configuration options
const modelPath = '../model/'; // path to model folder that will be loaded using http
// const modelPath = 'https://vladmandic.github.io/face-api/model/'; // path to model folder that will be loaded using http
// const modelPath = 'https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model/'; // path to model folder that will be loaded using http
const minScore = 0.2; // minimum score
const maxResults = 5; // maximum number of results to return
let optionsSSDMobileNet;
@ -17,15 +23,14 @@ function str(json) {
// helper function to print strings to html document as a log
function log(...txt) {
// eslint-disable-next-line no-console
console.log(...txt);
console.log(...txt); // eslint-disable-line no-console
const div = document.getElementById('log');
if (div) div.innerHTML += `<br>${txt}`;
}
// helper function to draw detected faces
function drawFaces(canvas, data, fps) {
const ctx = canvas.getContext('2d');
const ctx = canvas.getContext('2d', { willReadFrequently: true });
if (!ctx) return;
ctx.clearRect(0, 0, canvas.width, canvas.height);
// draw title
@ -42,18 +47,18 @@ function drawFaces(canvas, data, fps) {
ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height);
ctx.stroke();
ctx.globalAlpha = 1;
// const expression = person.expressions.sort((a, b) => Object.values(a)[0] - Object.values(b)[0]);
// draw text labels
const expression = Object.entries(person.expressions).sort((a, b) => b[1] - a[1]);
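// sorted by probability, highest first, so expression[0] holds the dominant expression and its score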
ctx.fillStyle = 'black';
ctx.fillText(`gender: ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 59);
ctx.fillText(`expression: ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 41);
ctx.fillText(`age: ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 23);
ctx.fillText(`roll:${person.angle.roll.toFixed(3)} pitch:${person.angle.pitch.toFixed(3)} yaw:${person.angle.yaw.toFixed(3)}`, person.detection.box.x, person.detection.box.y - 5);
ctx.fillText(`roll:${person.angle.roll}° pitch:${person.angle.pitch}° yaw:${person.angle.yaw}°`, person.detection.box.x, person.detection.box.y - 5);
ctx.fillStyle = 'lightblue';
ctx.fillText(`gender: ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 60);
ctx.fillText(`expression: ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 42);
ctx.fillText(`age: ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 24);
ctx.fillText(`roll:${person.angle.roll.toFixed(3)} pitch:${person.angle.pitch.toFixed(3)} yaw:${person.angle.yaw.toFixed(3)}`, person.detection.box.x, person.detection.box.y - 6);
ctx.fillText(`roll:${person.angle.roll}° pitch:${person.angle.pitch}° yaw:${person.angle.yaw}°`, person.detection.box.x, person.detection.box.y - 6);
// draw face points for each face
ctx.globalAlpha = 0.8;
ctx.fillStyle = 'lightblue';
@ -61,7 +66,6 @@ function drawFaces(canvas, data, fps) {
for (let i = 0; i < person.landmarks.positions.length; i++) {
ctx.beginPath();
ctx.arc(person.landmarks.positions[i].x, person.landmarks.positions[i].y, pointSize, 0, 2 * Math.PI);
// ctx.fillText(`${i}`, person.landmarks.positions[i].x + 4, person.landmarks.positions[i].y + 4);
ctx.fill();
}
}
@ -95,7 +99,6 @@ async function setupCamera() {
const canvas = document.getElementById('canvas');
if (!video || !canvas) return null;
let msg = '';
log('Setting up camera');
// setup webcam. note that navigator.mediaDevices requires that page is accessed via https
if (!navigator.mediaDevices) {
@ -103,23 +106,19 @@ async function setupCamera() {
return null;
}
let stream;
const constraints = {
audio: false,
video: { facingMode: 'user', resizeMode: 'crop-and-scale' },
};
const constraints = { audio: false, video: { facingMode: 'user', resizeMode: 'crop-and-scale' } };
if (window.innerWidth > window.innerHeight) constraints.video.width = { ideal: window.innerWidth };
else constraints.video.height = { ideal: window.innerHeight };
try {
stream = await navigator.mediaDevices.getUserMedia(constraints);
} catch (err) {
if (err.name === 'PermissionDeniedError' || err.name === 'NotAllowedError') msg = 'camera permission denied';
else if (err.name === 'SourceUnavailableError') msg = 'camera not available';
log(`Camera Error: ${msg}: ${err.message || err}`);
if (err.name === 'PermissionDeniedError' || err.name === 'NotAllowedError') log(`Camera Error: camera permission denied: ${err.message || err}`);
if (err.name === 'SourceUnavailableError') log(`Camera Error: camera not available: ${err.message || err}`);
return null;
}
// @ts-ignore
if (stream) video.srcObject = stream;
else {
if (stream) {
video.srcObject = stream;
} else {
log('Camera Error: stream empty');
return null;
}
@ -128,31 +127,23 @@ async function setupCamera() {
if (settings.deviceId) delete settings.deviceId;
if (settings.groupId) delete settings.groupId;
if (settings.aspectRatio) settings.aspectRatio = Math.trunc(100 * settings.aspectRatio) / 100;
log(`Camera active: ${track.label}`); // ${str(constraints)}
log(`Camera active: ${track.label}`);
log(`Camera settings: ${str(settings)}`);
canvas.addEventListener('click', () => {
// @ts-ignore
if (video && video.readyState >= 2) {
// @ts-ignore
if (video.paused) {
// @ts-ignore
video.play();
detectVideo(video, canvas);
} else {
// @ts-ignore
video.pause();
}
}
// @ts-ignore
log(`Camera state: ${video.paused ? 'paused' : 'playing'}`);
});
return new Promise((resolve) => {
video.onloadeddata = async () => {
// @ts-ignore
canvas.width = video.videoWidth;
// @ts-ignore
canvas.height = video.videoHeight;
// @ts-ignore
video.play();
detectVideo(video, canvas);
resolve(true);
@ -170,7 +161,6 @@ async function setupFaceAPI() {
await faceapi.nets.faceRecognitionNet.load(modelPath);
await faceapi.nets.faceExpressionNet.load(modelPath);
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: minScore, maxResults });
// check tf engine state
log(`Models loaded: ${str(faceapi.tf.engine().state.numTensors)} tensors`);
}
@ -180,19 +170,21 @@ async function main() {
log('FaceAPI WebCam Test');
// if you want to use wasm backend location for wasm binaries must be specified
// await faceapi.tf.setWasmPaths('../node_modules/@tensorflow/tfjs-backend-wasm/dist/');
// await faceapi.tf.setBackend('wasm');
// await faceapi.tf?.setWasmPaths(`https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${faceapi.tf.version_core}/dist/`);
// await faceapi.tf?.setBackend('wasm');
// log(`WASM SIMD: ${await faceapi.tf?.env().getAsync('WASM_HAS_SIMD_SUPPORT')} Threads: ${await faceapi.tf?.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT') ? 'Multi' : 'Single'}`);
// default is webgl backend
await faceapi.tf.setBackend('webgl');
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready();
// tfjs optimizations
if (faceapi.tf?.env().flagRegistry.CANVAS2D_WILL_READ_FREQUENTLY) faceapi.tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
// check version
log(`Version: FaceAPI ${str(faceapi?.version.faceapi || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`);
// log(`Flags: ${JSON.stringify(faceapi?.tf?.ENV.flags || { tf: 'not loaded' })}`);
log(`Version: FaceAPI ${str(faceapi?.version || '(not loaded)')} TensorFlow/JS ${str(faceapi.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi.tf?.getBackend() || '(not loaded)')}`);
await setupFaceAPI();
await setupCamera();

1
dist/face-api.d.ts vendored Normal file
View File

@ -0,0 +1 @@
export * from '../types/face-api';

1
dist/face-api.esm-nobundle.d.ts vendored Normal file
View File

@ -0,0 +1 @@
export * from '../types/face-api';

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

1
dist/face-api.esm.d.ts vendored Normal file
View File

@ -0,0 +1 @@
export * from '../types/face-api';

63765
dist/face-api.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

63756
dist/face-api.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

1
dist/face-api.node-gpu.d.ts vendored Normal file
View File

@ -0,0 +1 @@
export * from '../types/face-api';

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

1
dist/face-api.node-wasm.d.ts vendored Normal file
View File

@ -0,0 +1 @@
export * from '../types/face-api';

7
dist/face-api.node-wasm.js vendored Normal file

File diff suppressed because one or more lines are too long

1
dist/face-api.node.d.ts vendored Normal file
View File

@ -0,0 +1 @@
export * from '../types/face-api';

4707
dist/face-api.node.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

28
dist/tfjs.esm.d.ts vendored Normal file
View File

@ -0,0 +1,28 @@
/*
import '@tensorflow/tfjs-core';
import '@tensorflow/tfjs-core/dist/types';
import '@tensorflow/tfjs-core/dist/register_all_gradients';
import '@tensorflow/tfjs-core/dist/public/chained_ops/register_all_chained_ops';
import '@tensorflow/tfjs-data';
import '@tensorflow/tfjs-layers';
import '@tensorflow/tfjs-converter';
import '@tensorflow/tfjs-backend-cpu';
import '@tensorflow/tfjs-backend-webgl';
import '@tensorflow/tfjs-backend-wasm';
import '@tensorflow/tfjs-backend-webgpu';
*/
export declare const version: {
'tfjs-core': string;
'tfjs-backend-cpu': string;
'tfjs-backend-webgl': string;
'tfjs-data': string;
'tfjs-layers': string;
'tfjs-converter': string;
tfjs: string;
};
export { io, browser, image } from '@tensorflow/tfjs-core';
export { tensor, tidy, softmax, unstack, relu, add, conv2d, cast, zeros, concat, avgPool, stack, fill, transpose, tensor1d, tensor2d, tensor3d, tensor4d, maxPool, matMul, mul, sub, scalar } from '@tensorflow/tfjs-core';
export { div, pad, slice, reshape, slice3d, expandDims, depthwiseConv2d, separableConv2d, sigmoid, exp, tile, batchNorm, clipByValue } from '@tensorflow/tfjs-core';
export { ENV, Variable, Tensor, TensorLike, Rank, Tensor1D, Tensor2D, Tensor3D, Tensor4D, Tensor5D, NamedTensorMap } from '@tensorflow/tfjs-core';

61885
dist/tfjs.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

9
dist/tfjs.version.d.ts vendored Normal file
View File

@ -0,0 +1,9 @@
export declare const version: {
'tfjs-core': string;
'tfjs-backend-cpu': string;
'tfjs-backend-webgl': string;
'tfjs-data': string;
'tfjs-layers': string;
'tfjs-converter': string;
tfjs: string;
};

7
dist/tfjs.version.js vendored Normal file
View File

@ -0,0 +1,7 @@
/*
Face-API
homepage: <https://github.com/vladmandic/face-api>
author: <https://github.com/vladmandic>'
*/
var e="4.22.0";var s="4.22.0";var t="4.22.0";var n="4.22.0";var i="4.22.0";var w={tfjs:e,"tfjs-core":e,"tfjs-converter":s,"tfjs-backend-cpu":t,"tfjs-backend-webgl":n,"tfjs-backend-wasm":i};export{w as version};

View File

@ -1,12 +1,12 @@
{
"name": "@vladmandic/face-api",
"version": "1.4.1",
"version": "1.7.15",
"description": "FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS",
"sideEffects": false,
"main": "dist/face-api.node.js",
"module": "dist/face-api.esm.js",
"browser": "dist/face-api.esm.js",
"types": "types/index.d.ts",
"types": "types/face-api.d.ts",
"author": "Vladimir Mandic <mandic00@live.com>",
"bugs": {
"url": "https://github.com/vladmandic/face-api/issues"
@ -21,10 +21,10 @@
"url": "git+https://github.com/vladmandic/face-api.git"
},
"scripts": {
"start": "node --trace-warnings demo/node.js",
"dev": "node --trace-warnings server/serve.js",
"build": "rimraf dist/* types/* typedoc/* && node server/build.js",
"lint": "eslint src/**/* demo/*.js server/*.js",
"start": "node --no-warnings demo/node.js",
"build": "node build.js",
"dev": "build --profile development",
"lint": "eslint src/ demo/",
"test": "node --trace-warnings test/test-node.js",
"scan": "npx auditjs@latest ossi --dev --quiet"
},
@ -42,31 +42,38 @@
"tfjs"
],
"devDependencies": {
"@canvas/image": "^1.0.1",
"@tensorflow/tfjs": "^3.8.0",
"@tensorflow/tfjs-backend-wasm": "^3.8.0",
"@tensorflow/tfjs-node": "^3.8.0",
"@tensorflow/tfjs-node-gpu": "^3.8.0",
"@types/node": "^16.4.6",
"@typescript-eslint/eslint-plugin": "^4.28.5",
"@typescript-eslint/parser": "^4.28.5",
"@vladmandic/pilogger": "^0.2.17",
"canvas": "^2.8.0",
"chokidar": "^3.5.2",
"dayjs": "^1.10.6",
"esbuild": "^0.12.16",
"eslint": "^7.31.0",
"eslint-config-airbnb-base": "^14.2.1",
"eslint-plugin-import": "^2.23.4",
"eslint-plugin-json": "^3.0.0",
"@canvas/image": "^2.0.0",
"@microsoft/api-extractor": "^7.49.2",
"@tensorflow/tfjs": "^4.22.0",
"@tensorflow/tfjs-backend-cpu": "^4.22.0",
"@tensorflow/tfjs-backend-wasm": "^4.22.0",
"@tensorflow/tfjs-backend-webgl": "^4.22.0",
"@tensorflow/tfjs-backend-webgpu": "4.22.0",
"@tensorflow/tfjs-converter": "^4.22.0",
"@tensorflow/tfjs-core": "^4.22.0",
"@tensorflow/tfjs-data": "^4.22.0",
"@tensorflow/tfjs-layers": "^4.22.0",
"@tensorflow/tfjs-node": "^4.22.0",
"@tensorflow/tfjs-node-gpu": "^4.22.0",
"@types/node": "^22.13.1",
"@types/offscreencanvas": "^2019.7.3",
"@typescript-eslint/eslint-plugin": "^8.5.0",
"@typescript-eslint/parser": "^8.5.0",
"@vladmandic/build": "^0.10.2",
"@vladmandic/pilogger": "^0.5.1",
"ajv": "^8.17.1",
"esbuild": "^0.24.2",
"eslint": "8.57.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-plugin-import": "^2.30.0",
"eslint-plugin-json": "^4.0.1",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^5.1.0",
"node-fetch": "^2.6.1",
"rimraf": "^3.0.2",
"eslint-plugin-promise": "^7.1.0",
"node-fetch": "^3.3.2",
"rimraf": "^6.0.1",
"seedrandom": "^3.0.5",
"simple-git": "^2.41.2",
"tslib": "^2.3.0",
"typedoc": "^0.21.4",
"typescript": "4.3.5"
"tslib": "^2.8.1",
"typedoc": "^0.27.6",
"typescript": "5.7.3"
}
}

View File

@ -1,260 +0,0 @@
/* eslint-disable import/no-extraneous-dependencies */
/* eslint-disable node/no-unpublished-require */
const esbuild = require('esbuild');
const ts = require('typescript');
const log = require('@vladmandic/pilogger');
const TypeDoc = require('typedoc');
const { ESLint } = require('eslint');
const tfjs = require('@tensorflow/tfjs/package.json');
const changelog = require('./changelog');
const banner = { js: `
/*
Face-API
homepage: <https://github.com/vladmandic/face-api>
author: <https://github.com/vladmandic>'
*/
` };
let td = null;
let eslint = null;
// tsc configuration
const tsconfig = {
noEmitOnError: false,
target: ts.ScriptTarget.ES2018,
module: ts.ModuleKind.ES2020,
// outFile: "dist/face-api.d.ts",
outDir: 'types/',
declaration: true,
emitDeclarationOnly: true,
emitDecoratorMetadata: true,
experimentalDecorators: true,
skipLibCheck: true,
strictNullChecks: true,
baseUrl: './',
paths: {
tslib: ['node_modules/tslib/tslib.d.ts'],
},
};
// common configuration
const lintLocations = ['server/', 'demo/', 'src/', 'test/'];
// common configuration
const common = {
banner,
minifyWhitespace: false,
minifyIdentifiers: false,
minifySyntax: false,
bundle: true,
sourcemap: true,
metafile: true,
logLevel: 'error',
target: 'es2018',
// tsconfig: './tsconfig.json',
};
const targets = {
node: {
tfjs: {
platform: 'node',
format: 'cjs',
entryPoints: ['src/tfjs/tf-node.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['@tensorflow'],
},
node: {
platform: 'node',
format: 'cjs',
entryPoints: ['src/index.ts'],
outfile: 'dist/face-api.node.js',
external: ['@tensorflow'],
},
},
nodeGPU: {
tfjs: {
platform: 'node',
format: 'cjs',
entryPoints: ['src/tfjs/tf-node-gpu.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['@tensorflow'],
},
node: {
platform: 'node',
format: 'cjs',
entryPoints: ['src/index.ts'],
outfile: 'dist/face-api.node-gpu.js',
external: ['@tensorflow'],
},
},
nodeCPU: {
tfjs: {
platform: 'node',
format: 'cjs',
entryPoints: ['src/tfjs/tf-node-cpu.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['@tensorflow'],
},
node: {
platform: 'node',
format: 'cjs',
entryPoints: ['src/index.ts'],
outfile: 'dist/face-api.node-cpu.js',
external: ['@tensorflow'],
},
},
browserNoBundle: {
tfjs: {
platform: 'browser',
format: 'esm',
entryPoints: ['src/tfjs/tf-browser.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['fs', 'buffer', 'util', 'os', '@tensorflow'],
},
esm: {
platform: 'browser',
format: 'esm',
entryPoints: ['src/index.ts'],
outfile: 'dist/face-api.esm-nobundle.js',
external: ['fs', 'buffer', 'util', 'os', '@tensorflow', 'tfjs.esm.js'],
},
},
browserBundle: {
tfjs: {
platform: 'browser',
format: 'esm',
entryPoints: ['src/tfjs/tf-browser.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['fs', 'buffer', 'util', 'os'],
},
iife: {
platform: 'browser',
format: 'iife',
globalName: 'faceapi',
entryPoints: ['src/index.ts'],
outfile: 'dist/face-api.js',
external: ['fs', 'buffer', 'util', 'os'],
},
esm: {
platform: 'browser',
format: 'esm',
entryPoints: ['src/index.ts'],
outfile: 'dist/face-api.esm.js',
external: ['fs', 'buffer', 'util', 'os'],
},
},
};
async function getStats(json) {
const stats = {};
if (json && json.metafile.inputs && json.metafile.outputs) {
for (const [key, val] of Object.entries(json.metafile.inputs)) {
if (key.startsWith('node_modules')) {
stats.modules = (stats.modules || 0) + 1;
stats.moduleBytes = (stats.moduleBytes || 0) + val.bytes;
} else {
stats.imports = (stats.imports || 0) + 1;
stats.importBytes = (stats.importBytes || 0) + val.bytes;
}
}
const files = [];
for (const [key, val] of Object.entries(json.metafile.outputs)) {
if (!key.endsWith('.map')) {
files.push(key);
stats.outputBytes = (stats.outputBytes || 0) + val.bytes;
}
}
stats.outputFiles = files.join(', ');
}
return stats;
}
function typings(fileNames, options) {
log.info('Compile typings:', fileNames);
const program = ts.createProgram(fileNames, options);
const emit = program.emit();
const diag = ts
.getPreEmitDiagnostics(program)
.concat(emit.diagnostics);
for (const info of diag) {
// @ts-ignore
const msg = info.messageText.messageText || info.messageText;
if (msg.includes('package.json')) continue;
if (msg.includes('Expected 0 arguments, but got 1')) continue;
if (info.file) {
const pos = info.file.getLineAndCharacterOfPosition(info.start || 0);
log.error(`TSC: ${info.file.fileName} [${pos.line + 1},${pos.character + 1}]:`, msg);
} else {
log.error('TSC:', msg);
}
}
}
async function lint() {
log.info('Running Linter:', lintLocations);
if (!eslint) {
eslint = new ESLint();
}
const results = await eslint.lintFiles(lintLocations);
const errors = results.reduce((prev, curr) => prev += curr.errorCount, 0);
const warnings = results.reduce((prev, curr) => prev += curr.warningCount, 0);
log.info('Linter complete: files:', results.length, 'errors:', errors, 'warnings:', warnings);
if (errors > 0 || warnings > 0) {
const formatter = await eslint.loadFormatter('stylish');
const text = formatter.format(results);
log.warn(text);
}
}
async function typedoc(entryPoint) {
log.info('Generate TypeDocs:', entryPoint);
if (!td) {
td = new TypeDoc.Application();
td.options.addReader(new TypeDoc.TSConfigReader());
td.bootstrap({ entryPoints: entryPoint });
}
const project = td.convert();
const result = project ? await td.generateDocs(project, 'typedoc') : null;
if (result) log.warn('TypeDoc:', result);
}
// rebuild on file change
async function build(f, msg, dev = false) {
log.info('Build: file', msg, f, 'target:', common.target);
try {
// rebuild all target groups and types
for (const [targetGroupName, targetGroup] of Object.entries(targets)) {
for (const [targetName, targetOptions] of Object.entries(targetGroup)) {
// if triggered from watch mode, rebuild only browser bundle
// if ((require.main !== module) && (targetGroupName !== 'browserBundle')) continue;
// @ts-ignore
const meta = await esbuild.build({ ...common, ...targetOptions });
const stats = await getStats(meta);
log.state(`Build for: ${targetGroupName} type: ${targetName}:`, stats);
}
}
} catch (err) {
// catch errors and print where they occurred
log.error('Build error', JSON.stringify(err.errors || err, null, 2));
if (require.main === module) process.exit(1);
}
if (!dev) { // only for prod builds, skipped for dev build
await lint(); // run linter
await typings(targets.browserBundle.esm.entryPoints, tsconfig); // generate typings
await changelog.update('../CHANGELOG.md'); // generate changelog
await typedoc(targets.browserBundle.esm.entryPoints); // generate typedoc
}
if (require.main === module) process.exit(0);
}
if (require.main === module) {
log.header();
log.info(`Toolchain: tfjs: ${tfjs.version} esbuild ${esbuild.version}; typescript ${ts.version}; typedoc: ${TypeDoc.Application.VERSION} eslint: ${ESLint.version}`);
build('all', 'startup');
} else {
exports.build = build;
}

View File

@ -1,58 +0,0 @@
const fs = require('fs');
const path = require('path');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const dayjs = require('dayjs');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const simpleGit = require('simple-git/promise');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const logger = require('@vladmandic/pilogger');
const app = require('../package.json');
const git = simpleGit();
let text = `# ${app.name}
Version: **${app.version}**
Description: **${app.description}**
Author: **${app.author}**
License: **${app.license}** </LICENSE>
Repository: **<${app.repository.url}>**
## Changelog
`;
async function update(f) {
const gitLog = await git.log();
// @ts-ignore
const log = gitLog.all.sort((a, b) => (new Date(b.date).getTime() - new Date(a.date).getTime()));
let previous = '';
const headings = [];
for (const l of log) {
const msg = l.message.toLowerCase();
if ((l.refs !== '') || msg.match(/^[0-99].[0-99].[0-99]/)) {
const dt = dayjs(l.date).format('YYYY/MM/DD');
let ver = msg.match(/[0-99].[0-99].[0-99]/) ? msg : l.refs;
ver = ver.replace('tag: v', '').replace('tag: ', 'release: ').split(',')[0];
const heading = `\n### **${ver}** ${dt} ${l.author_email}\n\n`;
if (!headings.includes(heading) && !ver.startsWith('tag')) {
headings.push(heading);
text += heading;
}
} else if ((msg.length > 2) && !msg.startsWith('update') && (previous !== msg)) {
previous = msg;
text += `- ${msg}\n`;
}
}
const name = path.join(__dirname, f);
fs.writeFileSync(name, text);
logger.info('Update Change log:', [name]);
}
if (require.main === module) {
update('../CHANGELOG.md');
} else {
exports.update = update;
}

View File

@ -1,31 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIFazCCA1OgAwIBAgIUKQKodDBJnuweJs5IcTyL4NIp3vgwDQYJKoZIhvcNAQEL
BQAwRTELMAkGA1UEBhMCVVMxEDAOBgNVBAgMB0Zsb3JpZGExDjAMBgNVBAcMBU1p
YW1pMRQwEgYDVQQKDAtAdmxhZG1hbmRpYzAeFw0yMDExMDcxNTE3NDNaFw0yMTEx
MDcxNTE3NDNaMEUxCzAJBgNVBAYTAlVTMRAwDgYDVQQIDAdGbG9yaWRhMQ4wDAYD
VQQHDAVNaWFtaTEUMBIGA1UECgwLQHZsYWRtYW5kaWMwggIiMA0GCSqGSIb3DQEB
AQUAA4ICDwAwggIKAoICAQDSC88PF8NyLkagK5mAZ/d739SOU16l2Cx3zE35zZQh
O29+1L4L+oMksLYipo+FMgtGO+MSzFsvGgKCs2sDSdfyoNSTZ3QaN4BAZ0sbq+wL
cke7yRBTM/XIGOQfhqq8yC2q8/zXwUbZg0UsCAxDGNwUr0Qlm829laIU/UN1KcYS
57Nebl1z05wMEvYmyl4JBAl9ozne7KS9DyW7jbrAXE8TaEy3+pY66kx5GG6v2+up
ScITGm4YPmPPlpOF1UjQloosgxdVa+fVp8aNCa/rf0JNO0Uhb3OKOZ+4kYmpfPn/
trwoKWAa6CV1uAJ+3zDkLMq1JNlrV4OMp1QvX0wzA47a/n466JMN9SFb0Ng5wf19
VOtT5Zu7chDStBudVjxlMDfUixvhvn4sjbaLNYR1fyWPoNXwr0KX2lpTP1QOzp9/
Sd0iiJ8RPfXn8Xo26MStu4I52CZjS7yEMgJGCLH/mgPuSbrHHYYrrrCPJgmQOZG2
TNMI+EqOwQvHh2ghdv7t7EEk4IslBk0QzufMXQ2WFXQ20nvj74mrmmiMuBcmonpR
0egA5/M18ZPLQxYu0Q86NUr4XHtAG1fq+n8pseQ7Avy6Gk6HRiezCbB7TJ9rnNeu
jie1TDajC6W7rx0VF7hcxkIrDgNgnYcjXUV2hMx1lo4fIoWkL3nJJVEthMVIcJOX
EwIDAQABo1MwUTAdBgNVHQ4EFgQUHawIRAo1bW8Xy7l4oKfM+ESjhs0wHwYDVR0j
BBgwFoAUHawIRAo1bW8Xy7l4oKfM+ESjhs0wDwYDVR0TAQH/BAUwAwEB/zANBgkq
hkiG9w0BAQsFAAOCAgEAozQJk5Ahx7rDn/aMXLdZFxR81VfkmHDm7NhlJsdVKUx5
o/iegXnvwc1PoeKsz2S504QiuL8l7jqZoU2WPIm7Vlr+oxBgiKqjo1EqBsUgNCZ7
qxMD84TVp/KBGjKUh1TXhjJwGGfNNr+R/fJGw+36UeuY3fSckjaYTuNuVElp+DoZ
/pGyu1qpcybLfiR8mpQkCeU/iBq5gIjWddbVjlYoTKfqULZrpsAF2AeqELEgyshl
p3PNhW/54TJSn4mWK+39BibYHPkvx8orEuWKyjjRk82hEXi7J3hsGKX29qC3oO40
67DKDWmZdMCz+E1ERf10V0bSp6iJnnlwknHJloZUETV1NY/DdoSC6e8CN0+0cQqL
aJefJ483O3sXyN3v3+DaEFBLPFgRFGZB7eaBwR2xAv/KfjT5dSyi+wA4LZAxsQMC
Q7UYGNAfHLNHJo/bsj12+JDhJaFZ/KoBKzyMUuEXmvjxXNDMCfm+gVQFoLyXkGq3
491W/O7LjR6pkD+ce0qeTFMu3nfUubyfbONVDEfuH4GC1e+FAggCRaBnFsVzCzXj
jxOOLoQ9nwLk8v17mx0BSwX4iuqvXFntfJbzfcnzQfx/qqPFheIbGnmKw1lrRML8
87ZbN6t01+v2YyYe6Mc7p80s1R3jc8aVX8ca2KcYwsJAkg/xz0q5RJwsE1is5UY=
-----END CERTIFICATE-----

View File

@ -1,52 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQDSC88PF8NyLkag
K5mAZ/d739SOU16l2Cx3zE35zZQhO29+1L4L+oMksLYipo+FMgtGO+MSzFsvGgKC
s2sDSdfyoNSTZ3QaN4BAZ0sbq+wLcke7yRBTM/XIGOQfhqq8yC2q8/zXwUbZg0Us
CAxDGNwUr0Qlm829laIU/UN1KcYS57Nebl1z05wMEvYmyl4JBAl9ozne7KS9DyW7
jbrAXE8TaEy3+pY66kx5GG6v2+upScITGm4YPmPPlpOF1UjQloosgxdVa+fVp8aN
Ca/rf0JNO0Uhb3OKOZ+4kYmpfPn/trwoKWAa6CV1uAJ+3zDkLMq1JNlrV4OMp1Qv
X0wzA47a/n466JMN9SFb0Ng5wf19VOtT5Zu7chDStBudVjxlMDfUixvhvn4sjbaL
NYR1fyWPoNXwr0KX2lpTP1QOzp9/Sd0iiJ8RPfXn8Xo26MStu4I52CZjS7yEMgJG
CLH/mgPuSbrHHYYrrrCPJgmQOZG2TNMI+EqOwQvHh2ghdv7t7EEk4IslBk0QzufM
XQ2WFXQ20nvj74mrmmiMuBcmonpR0egA5/M18ZPLQxYu0Q86NUr4XHtAG1fq+n8p
seQ7Avy6Gk6HRiezCbB7TJ9rnNeujie1TDajC6W7rx0VF7hcxkIrDgNgnYcjXUV2
hMx1lo4fIoWkL3nJJVEthMVIcJOXEwIDAQABAoICAF45S+ZSW6uh1K7PQCnY+a0J
CJncDk5JPhFzhds0fGm39tknaCWJeEECQIIkw6cVfvc/sCpjn9fuTAgDolK0UnoV
6aZCN1P3Z8H8VDYSlm3AEyvLE1avrWbYu6TkzTyoc8wHbXn/yt+SQnpxFccXpMpm
oSRZ0x5jvHS79AHf/mnGpLEMw0FNQOgtrVxTVYGn3PYOPcyhzXi+Dcgn2QmnnxVu
qVOyxqehKTL9YdHjzsB/RN868P5RJocd3gmgVuyzS0KSf+oi4Ln4bFoiaVc0HDL3
DpjkHSl5lgu+xclRNfifKaK+hM0tLHi1VfFB//WrnjdKU3oSpQF4oowprM4Jn5AP
jhRI54JWZlWnvbiAOx7D49xFga3EnqjVH6So2gxi+q3Dv25luXGAnueaBPDpVC6c
nkJm2aCl7T3xlVpW8O5Fs+rsP8Xr9RTyEQJauM01uOi3N2zEeO8ERxTYEW5Sy2U7
OFKRXtLj7Jnejib/SxWGcIX4Wid5QFAygbXz4APfFN22QU0fqmhm4/c2OB/xM8qr
VVFx4xlG2wnuq5CZdZjmK3MTbmSM+pWW8mly/+++p694cf5oXGenYus/JWFNwxj/
fPyA7zQmaTOidu6clDHzkPCOE7TBv9TkQ7lL6ClgE7B39JR65ZQtjCYqRsADKsGI
dFMg+HDmGbVEfWg2V0GBAoIBAQDupImrJ0JXHA/0SEC2Tbz7pE60fRwmBFdhvk4Z
rzZiaOl+M2HXQU6b5DYhKcgdiFah5IuAnsRPo6X5Ug+Q1DV3OFTuEGAkXgqZliNa
aXsJcc0++DYlXX3BrTb66gylVLQRs5tZzsXps5iXWclziDC2go8RKnCwxsxwbzVq
FP4hoBP4dp83WoLF4NznnGFGw3/KLlMivtRxDE5OegpxTuWGlA/bVtT187Ksuuz3
dFUayLfpg0ABS/E7wwAJjSUpPPEi3J/G255H3lZXgS1gWcAf3rGDQYlJKF8UHdja
yWQcAOF+b/bYEpa4lHw+UtKNNkPTiCV4Y7CNQd8a2Gcl7VFTAoIBAQDhUs9r1dhm
rUlNAunVZZZVZ91XhXeqVTa/9xUDEvDh91nB5c7CcuNXxwcX4oTsMF4Bc7CHlvOv
pybp+QLjK310VjxxkFYJT0TKWuYqLjtNkQ93sp8wF3gVCf8m8bMOX/gPfQzNZWKp
un+ZWnzXNU5d2A+63xbZmFzT0Zo6H/h9YEO5Xxw32HCKFzEhl5JD34muZTEXSpdD
p7LUUr5LvnoUqEzonhXx2qRnTLP87d1o0GlkVex9HeeeBgrvm57QYoJnABxw9UFM
/ocLeYsjkmqJQRBDWgiwQlos1pdZyX2Yj20b7Wm5Pxd4aM9gh5EZZMXeQHhbHlWz
UY1IPxfAkytBAoIBAHmYavFDisD58oMlAZwiViXeXaAHk30nfyK1pfPeXBaeoEKG
idb1VsmF6bLSKD4sBwBshExgGWT+3IYCMx43kpqRoGzA+UvugvYpExBxaJiyXMM2
E9jMH1S9HqOQ+CqR00KlwoVrH1rqANk1jbkJbtDAC4fSmSLp2Kd9crj/w1F80FAs
mQnKW5HZ9pUpEEPPP2DUY9XzaCnF/GxuML31VmxRKxc20kIUDzmF8VJQ+0Avf85C
6yz99gfeXzl+qq2teKyrv9nCc47pEhN6JZXPhV53yPk5PmuBX5jPcHxiW1kNddhH
0n3cUuHv/rJ+3vvG555z46vJF9+R7c0u8LfZiTMCggEBAMQd4a/IN0xXM1+2U3SL
sSew+XR+FMPK25aGJmHAkKz9L8CWlzmj6cCy2LevT2aMSqYU3eeGOZ//at1nAV5c
shsaHA30RQ5hUkyWhZLdHnzK752NeQTQyJH3W3+4C9NNMIm6m/QCdLeqPflqSxK9
sPH5ZueN2UOXW+R5oTVKMmxd51RnNhZdasamnPrSBFrTK/EA3pOZNsOKKRqo0jz3
Eyb7vcUSI6OYXFQU7OwO1RGvpKvSJb5Y0wo11DrtRnO16i5gaGDg9u9e8ofISJSz
kcrZOKCGst1HQ1mXhbB+sbSh0aPnJog4I+OHxkgMdvyVO6vQjXExnAIxzzi8wZ25
+oECggEBAIT6q/sn8xFt5Jwc/0Z7YUjd415Nknam09tnbB+UPRR6lt6JFoILx8by
5Y1sN30HWDv27v9G32oZhUDii3Rt3PkbYLqlHy7XBMEXA9WIUo+3Be7mtdL8Wfrj
0zn0b7Hks9a9KsElG1dXUopwjMRL3M22UamaN7e/gl5jz2I7pyc5oaqz9GRDV5yG
slb6gGZ5naMycJD3p8vutXbmgKRr9beRp55UICAbEMdr5p3ks8bfR33Z6t+a97u1
IxI5x5Lb0fdfvL8JK3nRWn7Uzbmm5Ni/OaODNKP+fIm9m2yDAs8LM8RGpPtk6i0d
qIRta3H9KNw2Mhpkm77TtUSV/W5aOmY=
-----END PRIVATE KEY-----

View File

@ -1,162 +0,0 @@
/*
micro http2 server with file monitoring and automatic app rebuild
- can process concurrent http requests
  - monitors specified files and folders for changes
- triggers library and application rebuild
- any build errors are immediately displayed and can be corrected without need for restart
- passthrough data compression
*/
const fs = require('fs');
const zlib = require('zlib');
const http = require('http');
const http2 = require('http2');
const path = require('path');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const chokidar = require('chokidar');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const log = require('@vladmandic/pilogger');
const build = require('./build.js');
// app configuration
// you can provide your server key and certificate or use provided self-signed ones
// self-signed certificate generated using:
// openssl req -x509 -newkey rsa:4096 -nodes -keyout https.key -out https.crt -days 365 -subj "/C=US/ST=Florida/L=Miami/O=@vladmandic"
// client app does not work without secure server since browsers enforce https for webcam access
const options = {
key: fs.readFileSync('server/https.key'),
cert: fs.readFileSync('server/https.crt'),
root: '..',
default: 'demo/index.html',
httpPort: 8000,
httpsPort: 8001,
insecureHTTPParser: false,
minElapsed: 2,
monitor: ['package.json', 'demo', 'src'],
};
// just some predefined mime types
const mime = {
'.html': 'text/html; charset=utf-8',
'.js': 'text/javascript; charset=utf-8',
'.css': 'text/css; charset=utf-8',
'.json': 'application/json; charset=utf-8',
'.png': 'image/png',
'.jpg': 'image/jpeg',
'.gif': 'image/gif',
'.ico': 'image/x-icon',
'.svg': 'image/svg+xml',
'.wav': 'audio/wav',
'.mp4': 'video/mp4',
'.woff': 'font/woff',
'.woff2': 'font/woff2',
'.ttf': 'font/ttf',
'.wasm': 'application/wasm',
};
// checks for multiple events triggering within minElapsed and merges them into a single event
let last = Date.now();
async function buildAll(evt, msg) {
const now = Date.now();
if ((now - last) > options.minElapsed) build.build(evt, msg, true);
else log.state('Build: merge event file', msg, evt);
last = now;
}
// watch filesystem for any changes and notify build when needed
async function watch() {
const watcher = chokidar.watch(options.monitor, {
persistent: true,
ignorePermissionErrors: false,
alwaysStat: false,
ignoreInitial: true,
followSymlinks: true,
usePolling: false,
useFsEvents: false,
atomic: true,
});
// single event handler for file add/change/delete
watcher
.on('add', (evt) => buildAll(evt, 'add'))
.on('change', (evt) => buildAll(evt, 'modify'))
.on('unlink', (evt) => buildAll(evt, 'remove'))
.on('error', (err) => log.error(`Client watcher error: ${err}`))
.on('ready', () => log.state('Monitoring:', options.monitor));
}
// get file content for a valid url request
function handle(url) {
return new Promise((resolve) => {
let obj = { ok: false };
obj.file = url;
if (!fs.existsSync(obj.file)) resolve(null);
obj.stat = fs.statSync(obj.file);
if (obj.stat.isFile()) obj.ok = true;
if (!obj.ok && obj.stat.isDirectory()) {
obj.file = path.join(obj.file, options.default);
// @ts-ignore
obj = handle(obj.file);
}
resolve(obj);
});
}
// process http requests
async function httpRequest(req, res) {
handle(path.join(__dirname, options.root, decodeURI(req.url)))
.then((result) => {
// get original ip of requestor, regardless of whether it is behind a proxy or not
// eslint-disable-next-line dot-notation
const forwarded = (req.headers['forwarded'] || '').match(/for="\[(.*)\]:/);
const ip = (Array.isArray(forwarded) ? forwarded[1] : null) || req.headers['x-forwarded-for'] || req.ip || req.socket.remoteAddress;
if (!result || !result.ok) {
res.writeHead(404, { 'Content-Type': 'text/html' });
res.end('Error 404: Not Found\n', 'utf-8');
log.warn(`${req.method}/${req.httpVersion}`, res.statusCode, req.url, ip);
} else {
const ext = String(path.extname(result.file)).toLowerCase();
const contentType = mime[ext] || 'application/octet-stream';
const accept = req.headers['accept-encoding'] ? req.headers['accept-encoding'].includes('br') : false; // does target accept brotli compressed data
res.writeHead(200, {
// 'Content-Length': result.stat.size, // not using as it's misleading for compressed streams
'Content-Language': 'en', 'Content-Type': contentType, 'Content-Encoding': accept ? 'br' : '', 'Last-Modified': result.stat.mtime, 'Cache-Control': 'no-cache', 'X-Content-Type-Options': 'nosniff',
});
const compress = zlib.createBrotliCompress({ params: { [zlib.constants.BROTLI_PARAM_QUALITY]: 5 } }); // instance of brotli compression with level 5
const stream = fs.createReadStream(result.file);
if (!accept) stream.pipe(res); // don't compress data
else stream.pipe(compress).pipe(res); // compress data
// alternative methods of sending data
/// 2. read stream and send by chunk
// const stream = fs.createReadStream(result.file);
// stream.on('data', (chunk) => res.write(chunk));
// stream.on('end', () => res.end());
// 3. read entire file and send it as blob
// const data = fs.readFileSync(result.file);
// res.write(data);
log.data(`${req.method}/${req.httpVersion}`, res.statusCode, contentType, result.stat.size, req.url, ip);
}
return null;
})
.catch((err) => log.error('handle error:', err));
}
// app main entry point
async function main() {
log.header();
await watch();
if (options.httpPort && options.httpPort > 0) {
const server1 = http.createServer(options, httpRequest);
server1.on('listening', () => log.state('HTTP server listening:', options.httpPort));
server1.listen(options.httpPort);
}
if (options.httpsPort && options.httpsPort > 0) {
const server2 = http2.createSecureServer(options, httpRequest);
server2.on('listening', () => log.state('HTTP2 server listening:', options.httpsPort));
server2.listen(options.httpsPort);
}
await build.build('all', 'startup', true);
}
main();
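For readers skimming the server script above, a minimal sketch of the same content-negotiation idea in isolation: check the client's accept-encoding header and either stream the file as-is or pipe it through a Brotli compressor. The file path and port are illustrative, not taken from the repository.
// Example (not part of the diff): Brotli content negotiation with Node.js built-ins only
import * as http from 'http';
import * as fs from 'fs';
import * as zlib from 'zlib';
http.createServer((req, res) => {
const file = './index.html'; // illustrative path
const acceptsBrotli = (req.headers['accept-encoding'] || '').includes('br');
res.writeHead(200, {
'Content-Type': 'text/html',
...(acceptsBrotli ? { 'Content-Encoding': 'br' } : {}),
});
const stream = fs.createReadStream(file);
if (acceptsBrotli) {
// quality 5 mirrors the speed/ratio compromise chosen in the script above
const compress = zlib.createBrotliCompress({ params: { [zlib.constants.BROTLI_PARAM_QUALITY]: 5 } });
stream.pipe(compress).pipe(res);
} else {
stream.pipe(res); // client did not advertise brotli support, send uncompressed
}
}).listen(8000);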

View File

@ -10,9 +10,9 @@ export abstract class NeuralNetwork<TNetParams> {
this._name = name;
}
protected _params: TNetParams | undefined = undefined
protected _params: TNetParams | undefined = undefined;
protected _paramMappings: ParamMapping[] = []
protected _paramMappings: ParamMapping[] = [];
public _name: any;
@ -102,8 +102,9 @@ export abstract class NeuralNetwork<TNetParams> {
}
const { readFile } = env.getEnv();
const { manifestUri, modelBaseUri } = getModelUris(filePath, this.getDefaultModelName());
const fetchWeightsFromDisk = (filePaths: string[]) => Promise.all(filePaths.map((fp) => readFile(fp).then((buf) => buf.buffer)));
const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk);
const fetchWeightsFromDisk = (filePaths: string[]) => Promise.all(filePaths.map((fp) => readFile(fp).then((buf) => (typeof buf === 'string' ? Buffer.from(buf) : buf.buffer))));
// @ts-ignore async-vs-sync mismatch
const loadWeights = tf['io'].weightsLoaderFactory(fetchWeightsFromDisk);
const manifest = JSON.parse((await readFile(manifestUri)).toString());
const weightMap = await loadWeights(manifest, modelBaseUri);
this.loadFromWeightMap(weightMap);
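The hunk above adjusts how weights are fetched from disk when running under Node.js. A hedged usage sketch, assuming the standard face-api model loading API and a local ./model directory containing the shipped weight manifests:
// Example (not part of the diff): hypothetical model loading from disk in Node.js
import * as faceapi from '@vladmandic/face-api';
async function loadModels(modelPath = './model') {
// each net resolves its own manifest (e.g. *_model-weights_manifest.json) relative to modelPath
await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
}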

View File

@ -1,6 +1,10 @@
export class PlatformBrowser {
private textEncoder: TextEncoder;
constructor() {
this.textEncoder = new TextEncoder();
}
fetch(path: string, init?: any): Promise<Response> {
return fetch(path, init);
}
@ -13,9 +17,6 @@ export class PlatformBrowser {
if (encoding !== 'utf-8' && encoding !== 'utf8') {
throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`);
}
if (this.textEncoder == null) {
this.textEncoder = new TextEncoder();
}
return this.textEncoder.encode(text);
}

View File

@ -1,5 +1,4 @@
import * as tf from '../../dist/tfjs.esm';
import * as tf from '../../dist/tfjs.esm.js';
import { fullyConnectedLayer } from '../common/fullyConnectedLayer';
import { seperateWeightMaps } from '../faceProcessor/util';
import { TinyXception } from '../xception/TinyXception';
@ -10,7 +9,7 @@ import { NeuralNetwork } from '../NeuralNetwork';
import { NetInput, TNetInput, toNetInput } from '../dom/index';
export class AgeGenderNet extends NeuralNetwork<NetParams> {
private _faceFeatureExtractor: TinyXception
private _faceFeatureExtractor: TinyXception;
constructor(faceFeatureExtractor: TinyXception = new TinyXception(2)) {
super('AgeGenderNet');
@ -85,7 +84,7 @@ export class AgeGenderNet extends NeuralNetwork<NetParams> {
return 'age_gender_model';
}
public dispose(throwOnRedispose = true) {
public override dispose(throwOnRedispose = true) {
this.faceFeatureExtractor.dispose(throwOnRedispose);
super.dispose(throwOnRedispose);
}

View File

@ -7,10 +7,8 @@ export interface IBoundingBox {
bottom: number
}
export class BoundingBox extends Box<BoundingBox> implements IBoundingBox {
export class BoundingBox extends Box implements IBoundingBox {
constructor(left: number, top: number, right: number, bottom: number, allowNegativeDimensions = false) {
super({
left, top, right, bottom,
}, allowNegativeDimensions);
super({ left, top, right, bottom }, allowNegativeDimensions);
}
}

View File

@ -19,13 +19,13 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
}
}
private _x: number
private _x: number;
private _y: number
private _y: number;
private _width: number
private _width: number;
private _height: number
private _height: number;
constructor(_box: IBoundingBox | IRect, allowNegativeDimensions = true) {
const box = (_box || {}) as any;
@ -128,9 +128,7 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
this.width + padX,
this.height + padY,
];
return new Box({
x, y, width, height,
});
return new Box({ x, y, width, height });
}
public clipAtImageBorders(imgWidth: number, imgHeight: number): Box<BoxType> {
@ -143,9 +141,7 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
const clippedWidth = Math.min(newWidth, imgWidth - clippedX);
const clippedHeight = Math.min(newHeight, imgHeight - clippedY);
return (new Box({
x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight,
})).floor();
return (new Box({ x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight })).floor();
}
public shift(sx: number, sy: number): Box<BoxType> {
@ -153,9 +149,7 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
const x = this.x + sx;
const y = this.y + sy;
return new Box({
x, y, width, height,
});
return new Box({ x, y, width, height });
}
public padAtBorders(imageHeight: number, imageWidth: number) {
@ -189,9 +183,7 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
y = 1;
}
return {
dy, edy, dx, edx, y, ey, x, ex, w, h,
};
return { dy, edy, dx, edx, y, ey, x, ex, w, h };
}
public calibrate(region: Box) {

View File

@ -6,9 +6,9 @@ export interface IDimensions {
}
export class Dimensions implements IDimensions {
private _width: number
private _width: number;
private _height: number
private _height: number;
constructor(width: number, height: number) {
if (!isValidNumber(width) || !isValidNumber(height)) {

View File

@ -17,7 +17,7 @@ export class FaceDetection extends ObjectDetection implements IFaceDetecion {
super(score, score, '', relativeBox, imageDims);
}
public forSize(width: number, height: number): FaceDetection {
public override forSize(width: number, height: number): FaceDetection {
const { score, relativeBox, imageDims } = super.forSize(width, height);
return new FaceDetection(score, relativeBox, imageDims);
}

View File

@ -18,11 +18,11 @@ export interface IFaceLandmarks {
}
export class FaceLandmarks implements IFaceLandmarks {
protected _shift: Point
protected _shift: Point;
protected _positions: Point[]
protected _positions: Point[];
protected _imgDims: Dimensions
protected _imgDims: Dimensions;
constructor(
relativeFaceLandmarkPositions: Point[],

View File

@ -3,7 +3,7 @@ import { FaceLandmarks } from './FaceLandmarks';
import { Point } from './Point';
export class FaceLandmarks5 extends FaceLandmarks {
protected getRefPointsForAlignment(): Point[] {
protected override getRefPointsForAlignment(): Point[] {
const pts = this.positions;
return [
pts[0],

View File

@ -31,7 +31,7 @@ export class FaceLandmarks68 extends FaceLandmarks {
return this.positions.slice(48, 68);
}
protected getRefPointsForAlignment(): Point[] {
protected override getRefPointsForAlignment(): Point[] {
return [
this.getLeftEye(),
this.getRightEye(),

View File

@ -6,9 +6,8 @@ export interface IFaceMatch {
}
export class FaceMatch implements IFaceMatch {
private _label: string
private _distance: number
private _label: string;
private _distance: number;
constructor(label: string, distance: number) {
this._label = label;

View File

@ -3,16 +3,15 @@ import { IBoundingBox } from './BoundingBox';
import { Box } from './Box';
import { IRect } from './Rect';
export class LabeledBox extends Box<LabeledBox> {
export class LabeledBox extends Box {
public static assertIsValidLabeledBox(box: any, callee: string) {
Box.assertIsValidBox(box, callee);
if (!isValidNumber(box.label)) {
throw new Error(`${callee} - expected property label (${box.label}) to be a number`);
}
}
private _label: number
private _label: number;
constructor(box: IBoundingBox | IRect | any, label: number) {
super(box);

View File

@ -1,7 +1,7 @@
export class LabeledFaceDescriptors {
private _label: string
private _label: string;
private _descriptors: Float32Array[]
private _descriptors: Float32Array[];
constructor(label: string, descriptors: Float32Array[]) {
if (!(typeof label === 'string')) {

View File

@ -3,15 +3,15 @@ import { Dimensions, IDimensions } from './Dimensions';
import { IRect, Rect } from './Rect';
export class ObjectDetection {
private _score: number
private _score: number;
private _classScore: number
private _classScore: number;
private _className: string
private _className: string;
private _box: Rect
private _box: Rect;
private _imageDims: Dimensions
private _imageDims: Dimensions;
constructor(
score: number,

View File

@ -4,9 +4,9 @@ export interface IPoint {
}
export class Point implements IPoint {
private _x: number
private _x: number;
private _y: number
private _y: number;
constructor(x: number, y: number) {
this._x = x;

View File

@ -15,9 +15,9 @@ export class PredictedBox extends LabeledBox {
}
}
private _score: number
private _score: number;
private _classScore: number
private _classScore: number;
constructor(box: IBoundingBox | IRect | any, label: number, score: number, classScore: number) {
super(box, label);

View File

@ -7,10 +7,8 @@ export interface IRect {
height: number
}
export class Rect extends Box<Rect> implements IRect {
export class Rect extends Box implements IRect {
constructor(x: number, y: number, width: number, height: number, allowNegativeDimensions = false) {
super({
x, y, width, height,
}, allowNegativeDimensions);
super({ x, y, width, height }, allowNegativeDimensions);
}
}

View File

@ -9,17 +9,17 @@ import { imageToSquare } from './imageToSquare';
import { TResolvedNetInput } from './types';
export class NetInput {
private _imageTensors: Array<tf.Tensor3D | tf.Tensor4D> = []
private _imageTensors: Array<tf.Tensor3D | tf.Tensor4D> = [];
private _canvases: HTMLCanvasElement[] = []
private _canvases: HTMLCanvasElement[] = [];
private _batchSize: number
private _batchSize: number;
private _treatAsBatchInput = false
private _treatAsBatchInput = false;
private _inputDimensions: number[][] = []
private _inputDimensions: number[][] = [];
private _inputSize: number
private _inputSize = 0;
constructor(inputs: Array<TResolvedNetInput>, treatAsBatchInput = false) {
if (!Array.isArray(inputs)) {
@ -47,8 +47,9 @@ export class NetInput {
return;
}
// @ts-ignore
const canvas = (input as any) instanceof env.getEnv().Canvas ? input : createCanvasFromMedia(input);
this._canvases[idx] = canvas;
this._canvases[idx] = canvas as HTMLCanvasElement;
this._inputDimensions[idx] = [canvas.height, canvas.width, 3];
});
}
@ -127,23 +128,24 @@ export class NetInput {
if (input instanceof tf.Tensor) {
let imgTensor = isTensor4D(input) ? input : tf.expandDims(input);
imgTensor = padToSquare(imgTensor, isCenterInputs);
imgTensor = padToSquare(imgTensor as tf.Tensor4D, isCenterInputs);
if (imgTensor.shape[1] !== inputSize || imgTensor.shape[2] !== inputSize) {
imgTensor = tf.image.resizeBilinear(imgTensor, [inputSize, inputSize], false, false);
imgTensor = tf['image'].resizeBilinear(imgTensor as tf.Tensor4D, [inputSize, inputSize], false, false);
}
return imgTensor.as3D(inputSize, inputSize, 3);
}
if (input instanceof env.getEnv().Canvas) {
return tf.browser.fromPixels(imageToSquare(input, inputSize, isCenterInputs));
return tf['browser'].fromPixels(imageToSquare(input, inputSize, isCenterInputs));
}
throw new Error(`toBatchTensor - at batchIdx ${batchIdx}, expected input to be instanceof tf.Tensor or instanceof HTMLCanvasElement, instead have ${input}`);
});
const batchTensor = tf.stack(inputTensors.map((t) => tf.cast(t, 'float32'))).as4D(this.batchSize, inputSize, inputSize, 3);
// const batchTensor = tf.stack(inputTensors.map((t) => tf.cast(t, 'float32'))) as tf.Tensor4D;
return batchTensor;
});

View File

@ -4,7 +4,10 @@ import { isMediaLoaded } from './isMediaLoaded';
export function awaitMediaLoaded(media: HTMLImageElement | HTMLVideoElement | HTMLCanvasElement) {
// eslint-disable-next-line consistent-return
return new Promise((resolve, reject) => {
if (media instanceof env.getEnv().Canvas || isMediaLoaded(media)) return resolve(null);
if (media instanceof env.getEnv().Canvas || isMediaLoaded(media)) {
resolve(null);
return;
}
function onError(e: Event) {
if (!e.currentTarget) return;

View File

@ -25,17 +25,11 @@ export async function extractFaceTensors(imageTensor: tf.Tensor3D | tf.Tensor4D,
return tf.tidy(() => {
const [imgHeight, imgWidth, numChannels] = imageTensor.shape.slice(isTensor4D(imageTensor) ? 1 : 0);
const boxes = detections
.map((det) => (det instanceof FaceDetection
? det.forSize(imgWidth, imgHeight).box
: det))
const boxes = detections.map((det) => (det instanceof FaceDetection ? det.forSize(imgWidth, imgHeight).box : det))
.map((box) => box.clipAtImageBorders(imgWidth, imgHeight));
const faceTensors = boxes.map(({
x, y, width, height,
}) => tf.slice3d(imageTensor.as3D(imgHeight, imgWidth, numChannels), [y, x, 0], [height, width, numChannels]));
const faceTensors = boxes
.filter((box) => box.width > 0 && box.height > 0)
.map(({ x, y, width, height }) => tf.slice3d(imageTensor.as3D(imgHeight, imgWidth, numChannels), [y, x, 0], [height, width, numChannels]));
return faceTensors;
});
}
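Since the hunk above now drops zero-sized boxes before slicing, a hedged usage sketch may help; it assumes detectAllFaces and extractFaceTensors are exported from the package entry point and that the detection models have already been loaded:
// Example (not part of the diff): cropping detected faces out of an image tensor
import * as faceapi from '@vladmandic/face-api';
async function cropFaces(imageTensor: any /* tf.Tensor3D from the bundled tfjs */) {
const detections = await faceapi.detectAllFaces(imageTensor);
// zero-width/height boxes are filtered out, so the result may be shorter than `detections`
const faceTensors = await faceapi.extractFaceTensors(imageTensor, detections);
faceTensors.forEach((t: any) => t.dispose()); // the caller is responsible for disposing the returned tensors
}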

View File

@ -3,21 +3,10 @@ import { resolveInput } from './resolveInput';
export function getContext2dOrThrow(canvasArg: string | HTMLCanvasElement | CanvasRenderingContext2D): CanvasRenderingContext2D {
const { Canvas, CanvasRenderingContext2D } = env.getEnv();
if (canvasArg instanceof CanvasRenderingContext2D) {
return canvasArg;
}
if (canvasArg instanceof CanvasRenderingContext2D) return canvasArg;
const canvas = resolveInput(canvasArg);
if (!(canvas instanceof Canvas)) {
throw new Error('resolveContext2d - expected canvas to be of instance of Canvas');
}
const ctx = canvas.getContext('2d');
if (!ctx) {
throw new Error('resolveContext2d - canvas 2d context is null');
}
if (!(canvas instanceof Canvas)) throw new Error('resolveContext2d - expected canvas to be of instance of Canvas');
const ctx = canvas.getContext('2d', { willReadFrequently: true });
if (!ctx) throw new Error('resolveContext2d - canvas 2d context is null');
return ctx;
}
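The context is now requested with the willReadFrequently hint, which asks the browser to keep the canvas backing store in CPU memory so repeated getImageData calls stay cheap. A small browser-only sketch of the same call:
// Example (not part of the diff): requesting a 2d context optimized for frequent readbacks
const canvas = document.createElement('canvas');
canvas.width = 16;
canvas.height = 16;
const ctx = canvas.getContext('2d', { willReadFrequently: true });
if (!ctx) throw new Error('canvas 2d context is null');
ctx.fillRect(0, 0, 16, 16);
const pixels = ctx.getImageData(0, 0, 16, 16); // repeated readbacks like this are what the hint optimizes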

View File

@ -11,7 +11,7 @@ export async function imageTensorToCanvas(
const [height, width, numChannels] = imgTensor.shape.slice(isTensor4D(imgTensor) ? 1 : 0);
const imgTensor3D = tf.tidy(() => imgTensor.as3D(height, width, numChannels).toInt());
await tf.browser.toPixels(imgTensor3D, targetCanvas);
await tf['browser'].toPixels(imgTensor3D, targetCanvas);
imgTensor3D.dispose();

View File

@ -8,7 +8,8 @@ export async function loadWeightMap(
defaultModelName: string,
): Promise<tf.NamedTensorMap> {
const { manifestUri, modelBaseUri } = getModelUris(uri, defaultModelName);
// @ts-ignore
const manifest = await fetchJson<tf.io.WeightsManifestConfig>(manifestUri);
// if (manifest['weightsManifest']) manifest = manifest['weightsManifest'];
return tf.io.loadWeights(manifest, modelBaseUri);
return tf['io'].loadWeights(manifest, modelBaseUri);
}

View File

@ -1,11 +1,9 @@
import * as tf from '../../dist/tfjs.esm';
import type { Tensor3D, Tensor4D } from '../../dist/tfjs.esm';
import { NetInput } from './NetInput';
export type TMediaElement = HTMLImageElement | HTMLVideoElement | HTMLCanvasElement
export type TResolvedNetInput = TMediaElement | tf.Tensor3D | tf.Tensor4D
export type TResolvedNetInput = TMediaElement | Tensor3D | Tensor4D
export type TNetInputArg = string | TResolvedNetInput
export type TNetInput = TNetInputArg | Array<TNetInputArg> | NetInput | tf.Tensor4D
export type TNetInput = string | TResolvedNetInput | Array<string | TResolvedNetInput> | NetInput

View File

@ -11,13 +11,13 @@ export interface IDrawBoxOptions {
}
export class DrawBoxOptions {
public boxColor: string
public boxColor: string;
public lineWidth: number
public lineWidth: number;
public drawLabelOptions: DrawTextFieldOptions
public drawLabelOptions: DrawTextFieldOptions;
public label?: string
public label?: string;
constructor(options: IDrawBoxOptions = {}) {
const {
@ -36,9 +36,9 @@ export class DrawBoxOptions {
}
export class DrawBox {
public box: Box
public box: Box;
public options: DrawBoxOptions
public options: DrawBoxOptions;
constructor(
box: IBoundingBox | IRect,

View File

@ -17,17 +17,17 @@ export interface IDrawFaceLandmarksOptions {
}
export class DrawFaceLandmarksOptions {
public drawLines: boolean
public drawLines: boolean;
public drawPoints: boolean
public drawPoints: boolean;
public lineWidth: number
public lineWidth: number;
public pointSize: number
public pointSize: number;
public lineColor: string
public lineColor: string;
public pointColor: string
public pointColor: string;
constructor(options: IDrawFaceLandmarksOptions = {}) {
const {
@ -43,9 +43,9 @@ export class DrawFaceLandmarksOptions {
}
export class DrawFaceLandmarks {
public faceLandmarks: FaceLandmarks
public faceLandmarks: FaceLandmarks;
public options: DrawFaceLandmarksOptions
public options: DrawFaceLandmarksOptions;
constructor(
faceLandmarks: FaceLandmarks,

View File

@ -25,17 +25,17 @@ export interface IDrawTextFieldOptions {
}
export class DrawTextFieldOptions implements IDrawTextFieldOptions {
public anchorPosition: AnchorPosition
public anchorPosition: AnchorPosition;
public backgroundColor: string
public backgroundColor: string;
public fontColor: string
public fontColor: string;
public fontSize: number
public fontSize: number;
public fontStyle: string
public fontStyle: string;
public padding: number
public padding: number;
constructor(options: IDrawTextFieldOptions = {}) {
const {
@ -51,11 +51,11 @@ export class DrawTextFieldOptions implements IDrawTextFieldOptions {
}
export class DrawTextField {
public text: string[]
public text: string[];
public anchor : IPoint
public anchor : IPoint;
public options: DrawTextFieldOptions
public options: DrawTextFieldOptions;
constructor(
text: string | string[] | DrawTextField,

View File

@ -7,12 +7,7 @@ import { DrawTextField } from './DrawTextField';
export type DrawFaceExpressionsInput = FaceExpressions | WithFaceExpressions<{}>
export function drawFaceExpressions(
canvasArg: string | HTMLCanvasElement,
faceExpressions: DrawFaceExpressionsInput | Array<DrawFaceExpressionsInput>,
minConfidence = 0.1,
textFieldAnchor?: IPoint,
) {
export function drawFaceExpressions(canvasArg: string | HTMLCanvasElement, faceExpressions: DrawFaceExpressionsInput | Array<DrawFaceExpressionsInput>, minConfidence = 0.1, textFieldAnchor?: IPoint) {
const faceExpressionsArray = Array.isArray(faceExpressions) ? faceExpressions : [faceExpressions];
faceExpressionsArray.forEach((e) => {

View File

@ -1,26 +1,20 @@
import { FileSystem } from './types';
import { isNodejs } from './isNodejs';
export function createFileSystem(fs?: any): FileSystem {
let requireFsError = '';
if (!fs) {
if (!fs && isNodejs()) {
try {
// eslint-disable-next-line global-require
// eslint-disable-next-line global-require, @typescript-eslint/no-require-imports
fs = require('fs');
} catch (err) {
requireFsError = err.toString();
requireFsError = (err as any).toString();
}
}
const readFile = fs
? (filePath: string) => new Promise<Buffer>((resolve, reject) => {
fs.readFile(filePath, (err: any, buffer: Buffer) => (err ? reject(err) : resolve(buffer)));
})
: () => {
throw new Error(`readFile - failed to require fs in nodejs environment with error: ${requireFsError}`);
};
return {
readFile,
};
// eslint-disable-next-line no-undef
? (filePath: string) => new Promise<string | Buffer>((resolve, reject) => { fs.readFile(filePath, (err: NodeJS.ErrnoException | null, buffer: string | Buffer) => (err ? reject(err) : resolve(buffer))); })
: () => { throw new Error(`readFile - failed to require fs in nodejs environment with error: ${requireFsError}`); };
return { readFile };
}

View File

@ -3,11 +3,9 @@ import { createFileSystem } from './createFileSystem';
import { Environment } from './types';
export function createNodejsEnv(): Environment {
// eslint-disable-next-line dot-notation
const Canvas = global['Canvas'] || global.HTMLCanvasElement;
const Canvas: (new () => HTMLCanvasElement) = (global as any)['Canvas'] || global.HTMLCanvasElement;
const Image = global.Image || global.HTMLImageElement;
// eslint-disable-next-line dot-notation
const Video = global['Video'] || global.HTMLVideoElement;
const Video: (new () => HTMLVideoElement) = (global as any)['Video'] || global.HTMLVideoElement;
const createCanvasElement = () => {
if (Canvas) return new Canvas();

src/env/isNodejs.ts vendored
View File

@ -1,6 +1,6 @@
export function isNodejs(): boolean {
return typeof global === 'object'
&& typeof require === 'function'
&& typeof module !== 'undefined'
&& typeof process !== 'undefined' && !!process.version;
&& typeof process !== 'undefined'
&& process.versions != null
&& process.versions.node != null;
}

src/env/types.ts vendored
View File

@ -1,17 +1,17 @@
export type FileSystem = {
// eslint-disable-next-line no-unused-vars
readFile: (filePath: string) => Promise<Buffer>
}
readFile: (filePath: string) => Promise<string | Buffer>;
};
export type Environment = FileSystem & {
Canvas: typeof HTMLCanvasElement
CanvasRenderingContext2D: typeof CanvasRenderingContext2D
Image: typeof HTMLImageElement
ImageData: typeof ImageData
Video: typeof HTMLVideoElement
createCanvasElement: () => HTMLCanvasElement
createImageElement: () => HTMLImageElement
createVideoElement: () => HTMLVideoElement
Canvas: typeof HTMLCanvasElement;
CanvasRenderingContext2D: typeof CanvasRenderingContext2D;
Image: typeof HTMLImageElement;
ImageData: typeof ImageData;
Video: typeof HTMLVideoElement;
createCanvasElement: () => HTMLCanvasElement;
createImageElement: () => HTMLImageElement;
createVideoElement: () => HTMLVideoElement;
// eslint-disable-next-line no-undef, no-unused-vars
fetch: (url: string, init?: RequestInit) => Promise<Response>
}
fetch: (url: string, init?: RequestInit) => Promise<Response>;
};

View File

@ -1,12 +1,10 @@
export function euclideanDistance(arr1: number[] | Float32Array, arr2: number[] | Float32Array) {
if (arr1.length !== arr2.length) throw new Error('euclideanDistance: arr1.length !== arr2.length');
const desc1 = Array.from(arr1);
const desc2 = Array.from(arr2);
return Math.sqrt(
desc1
.map((val, i) => val - desc2[i])
.reduce((res, diff) => res + (diff ** 2), 0),
.reduce((res, diff) => res + (diff * diff), 0),
);
}
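This helper computes the L2 distance used to compare 128-dimensional face descriptors; since it is re-exported from the package index, a usage sketch with toy vectors looks like this:
// Example (not part of the diff): comparing two descriptors with euclideanDistance
import * as faceapi from '@vladmandic/face-api';
const a = new Float32Array([0, 3]);
const b = new Float32Array([4, 0]);
console.log(faceapi.euclideanDistance(a, b)); // sqrt(3*3 + 4*4) = 5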

View File

@ -1,19 +1,13 @@
export const FACE_EXPRESSION_LABELS = ['neutral', 'happy', 'sad', 'angry', 'fearful', 'disgusted', 'surprised'];
export const FACE_EXPRESSION_LABELS = ['neutral', 'happy', 'sad', 'angry', 'fearful', 'disgusted', 'surprised'] as const;
export class FaceExpressions {
public neutral: number
public happy: number
public sad: number
public angry: number
public fearful: number
public disgusted: number
public surprised: number
public neutral = 0;
public happy = 0;
public sad = 0;
public angry = 0;
public fearful = 0;
public disgusted = 0;
public surprised = 0;
constructor(probabilities: number[] | Float32Array) {
if (probabilities.length !== 7) {
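Declaring the labels as const lets TypeScript treat them as a literal tuple, so a label union type can be derived directly from the array. A small hypothetical sketch of that pattern:
// Example (not part of the diff): deriving a label union type from an as-const tuple
const LABELS = ['neutral', 'happy', 'sad', 'angry', 'fearful', 'disgusted', 'surprised'] as const;
type ExpressionLabel = typeof LABELS[number]; // 'neutral' | 'happy' | ... | 'surprised'
const pick = (label: ExpressionLabel) => label;
pick('happy'); // ok
// pick('confused'); // compile-time error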

View File

@ -13,7 +13,7 @@ export abstract class FaceProcessor<
TExtractorParams extends FaceFeatureExtractorParams | TinyFaceFeatureExtractorParams
>
extends NeuralNetwork<NetParams> {
protected _faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>
protected _faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>;
constructor(_name: string, faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>) {
super(_name);
@ -24,7 +24,7 @@ export abstract class FaceProcessor<
return this._faceFeatureExtractor;
}
protected abstract getDefaultModelName(): string
protected abstract override getDefaultModelName(): string
protected abstract getClassifierChannelsIn(): number
@ -45,7 +45,7 @@ export abstract class FaceProcessor<
});
}
public dispose(throwOnRedispose = true) {
public override dispose(throwOnRedispose = true) {
this.faceFeatureExtractor.dispose(throwOnRedispose);
super.dispose(throwOnRedispose);
}

View File

@ -51,7 +51,7 @@ export class FaceRecognitionNet extends NeuralNetwork<NetParams> {
const globalAvg = out.mean([1, 2]) as tf.Tensor2D;
const fullyConnected = tf.matMul(globalAvg, params.fc);
return fullyConnected;
return fullyConnected as tf.Tensor2D;
});
}
@ -60,6 +60,7 @@ export class FaceRecognitionNet extends NeuralNetwork<NetParams> {
}
public async computeFaceDescriptor(input: TNetInput): Promise<Float32Array|Float32Array[]> {
// @ts-ignore
if (input?.shape?.some((dim) => dim <= 0)) return new Float32Array(128);
const netInput = await toNetInput(input);
const faceDescriptorTensors = tf.tidy(() => tf.unstack(this.forwardInput(netInput)));

View File

@ -1,3 +1,4 @@
import { Point } from '../classes';
import { FaceDetection } from '../classes/FaceDetection';
import { FaceLandmarks } from '../classes/FaceLandmarks';
import { FaceLandmarks68 } from '../classes/FaceLandmarks68';
@ -5,75 +6,106 @@ import { isWithFaceDetection, WithFaceDetection } from './WithFaceDetection';
export type WithFaceLandmarks<
TSource extends WithFaceDetection<{}>,
TFaceLandmarks extends FaceLandmarks = FaceLandmarks68 > = TSource & {
landmarks: TFaceLandmarks,
unshiftedLandmarks: TFaceLandmarks,
alignedRect: FaceDetection,
angle: { roll: number | undefined, pitch: number | undefined, yaw: number | undefined },
}
TFaceLandmarks extends FaceLandmarks = FaceLandmarks68
> = TSource & {
landmarks: TFaceLandmarks;
unshiftedLandmarks: TFaceLandmarks;
alignedRect: FaceDetection;
angle: {
roll: number | undefined;
pitch: number | undefined;
yaw: number | undefined;
};
};
export function isWithFaceLandmarks(obj: any): obj is WithFaceLandmarks<WithFaceDetection<{}>, FaceLandmarks> {
return isWithFaceDetection(obj)
// eslint-disable-next-line dot-notation
&& obj['landmarks'] instanceof FaceLandmarks
// eslint-disable-next-line dot-notation
&& obj['unshiftedLandmarks'] instanceof FaceLandmarks
// eslint-disable-next-line dot-notation
&& obj['alignedRect'] instanceof FaceDetection;
export function isWithFaceLandmarks(
obj: any,
): obj is WithFaceLandmarks<WithFaceDetection<{}>, FaceLandmarks> {
return (
isWithFaceDetection(obj)
&& (obj as any)['landmarks'] instanceof FaceLandmarks
&& (obj as any)['unshiftedLandmarks'] instanceof FaceLandmarks
&& (obj as any)['alignedRect'] instanceof FaceDetection
);
}
function calculateFaceAngle(mesh) {
// returns the angle in the plane (in radians) between the positive x-axis and the ray from (0,0) to the point (x,y)
const radians = (a1, a2, b1, b2) => (Math.atan2(b2 - a2, b1 - a1) % Math.PI);
// convert radians to degrees
function calculateFaceAngle(mesh: FaceLandmarks) {
// Helper to convert radians to degrees
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
const degrees = (theta) => (theta * 180) / Math.PI;
const degrees = (radians: number) => (radians * 180) / Math.PI;
const calcLengthBetweenTwoPoints = (a: Point, b: Point) => Math.sqrt((a.x - b.x) ** 2 + (a.y - b.y) ** 2);
const angle = { roll: <number | undefined>undefined, pitch: <number | undefined>undefined, yaw: <number | undefined>undefined };
const angle = {
roll: <number | undefined>undefined,
pitch: <number | undefined>undefined,
yaw: <number | undefined>undefined,
};
if (!mesh || !mesh._positions || mesh._positions.length !== 68) return angle;
const pt = mesh._positions;
const calcYaw = (leftPoint: Point, midPoint: Point, rightPoint: Point) => {
// Calc x-distance from left side of the face ("ear") to facial midpoint ("nose")
const leftToMidpoint = Math.floor(leftPoint.x - midPoint.x);
// Calc x-distance from facial midpoint ("nose") to the right side of the face ("ear")
const rightToMidpoint = Math.floor(midPoint.x - rightPoint.x);
// Difference in distances coincidentally approximates to angles
return leftToMidpoint - rightToMidpoint;
};
// values are in radians in range of -pi/2 to pi/2 which is -90 to +90 degrees
// value of 0 means center
const calcRoll = (lever: Point, pivot: Point) => {
// When rolling, the head seems to pivot from the nose/lips/chin area.
// So, we'll choose any two points from the facial midline, where the first point is the pivot and the other is the "lever"
// Plan/Execution: get the hypotenuse & opposite sides of a 90deg triangle ==> Calculate angle in radians
const hypotenuse = Math.hypot(pivot.x - lever.x, pivot.y - lever.y);
const opposite = pivot.y - lever.y;
const angleInRadians = Math.asin(opposite / hypotenuse);
const angleInDegrees = degrees(angleInRadians);
const normalizeAngle = Math.floor(90 - angleInDegrees);
// If lever more to the left of the pivot, then we're tilting left
// "-" is negative direction. "+", or absence of a sign is positive direction
const tiltDirection = pivot.x - lever.x < 0 ? -1 : 1;
const result = normalizeAngle * tiltDirection;
return result;
};
// roll is face lean from left to right
// comparing x,y of outside corners of leftEye and rightEye
angle.roll = -radians(pt[36]._x, pt[36]._y, pt[45]._x, pt[45]._y);
const calcPitch = (leftPoint: Point, midPoint: Point, rightPoint: Point) => {
// Theory: While pitching, the nose is the most salient point --> That's what we'll use to make a triangle.
// The "base" runs between points that don't move when we pitch our head (i.e. an imaginary line running ear to ear through the nose).
// Execution: Get the opposite & adjacent lengths of the triangle from the ear's perspective. Use them to get the angle.
// pitch is face turn from left right
// comparing x distance of top of nose to left and right edge of face
// precision is lacking since coordinates are not precise enough
angle.pitch = radians(0, Math.abs(pt[0]._x - pt[30]._x) / pt[30]._x, Math.PI, Math.abs(pt[16]._x - pt[30]._x) / pt[30]._x);
// yaw is face move from up to down
// comparing size of the box around the face with top and bottom of detected landmarks
// silly hack, but this gives us face compression on y-axis
// e.g., tilting head up hides the forehead that doesn't have any landmarks so ratio drops
const bottom = pt.reduce((prev, cur) => (prev < cur._y ? prev : cur._y), +Infinity);
const top = pt.reduce((prev, cur) => (prev > cur._y ? prev : cur._y), -Infinity);
angle.yaw = Math.PI * (mesh._imgDims._height / (top - bottom) / 1.40 - 1);
const base = calcLengthBetweenTwoPoints(leftPoint, rightPoint);
// adjacent is base/2 technically.
const baseCoords = new Point((leftPoint.x + rightPoint.x) / 2, (leftPoint.y + rightPoint.y) / 2);
const midToBaseLength = calcLengthBetweenTwoPoints(midPoint, baseCoords);
const angleInRadians = Math.atan(midToBaseLength / base);
const angleInDegrees = Math.floor(degrees(angleInRadians));
// Account for directionality.
// pitching forwards (i.e. tilting your head forwards) is positive (or has no sign); backwards is negative.
const direction = baseCoords.y - midPoint.y < 0 ? -1 : 1;
const result = angleInDegrees * direction;
return result;
};
if (!mesh || !mesh.positions || mesh.positions.length !== 68) return angle;
const pt = mesh.positions;
angle.roll = calcRoll(pt[27], pt[66]);
angle.pitch = calcPitch(pt[14], pt[30], pt[2]);
angle.yaw = calcYaw(pt[14], pt[33], pt[2]);
return angle;
}
export function extendWithFaceLandmarks<
TSource extends WithFaceDetection<{}>,
TFaceLandmarks extends FaceLandmarks = FaceLandmarks68 >(sourceObj: TSource, unshiftedLandmarks: TFaceLandmarks): WithFaceLandmarks<TSource, TFaceLandmarks> {
export function extendWithFaceLandmarks<TSource extends WithFaceDetection<{}>, TFaceLandmarks extends FaceLandmarks = FaceLandmarks68>(
sourceObj: TSource,
unshiftedLandmarks: TFaceLandmarks,
): WithFaceLandmarks<TSource, TFaceLandmarks> {
const { box: shift } = sourceObj.detection;
const landmarks = unshiftedLandmarks.shiftBy<TFaceLandmarks>(shift.x, shift.y);
const rect = landmarks.align();
const { imageDims } = sourceObj.detection;
const alignedRect = new FaceDetection(sourceObj.detection.score, rect.rescale(imageDims.reverse()), imageDims);
const alignedRect = new FaceDetection(
sourceObj.detection.score,
rect.rescale(imageDims.reverse()),
imageDims,
);
const angle = calculateFaceAngle(unshiftedLandmarks);
const extension = {
landmarks,
unshiftedLandmarks,
alignedRect,
angle,
};
const extension = { landmarks, unshiftedLandmarks, alignedRect, angle };
return { ...sourceObj, ...extension };
}
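With this change the roll/pitch/yaw values attached to a landmarks result are approximate degrees with a sign encoding direction, rather than raw radians. A hedged usage sketch, assuming the usual detection pipeline and preloaded models:
// Example (not part of the diff): reading the degree-based head pose angles
import * as faceapi from '@vladmandic/face-api';
async function logHeadPose(input: HTMLVideoElement) {
const result = await faceapi.detectSingleFace(input).withFaceLandmarks();
if (!result) return;
// e.g. negative pitch means the head is tilted backwards, negative roll means tilted to the left
const { roll, pitch, yaw } = result.angle;
console.log({ roll, pitch, yaw });
}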

View File

@ -21,7 +21,7 @@ export class ComputeFaceDescriptorsTaskBase<TReturn, TParentReturn> extends Comp
}
export class ComputeAllFaceDescriptorsTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends ComputeFaceDescriptorsTaskBase<WithFaceDescriptor<TSource>[], TSource[]> {
public async run(): Promise<WithFaceDescriptor<TSource>[]> {
public override async run(): Promise<WithFaceDescriptor<TSource>[]> {
const parentResults = await this.parentTask;
const descriptors = await extractAllFacesAndComputeResults<TSource, Float32Array[]>(
parentResults,
@ -43,20 +43,17 @@ export class ComputeAllFaceDescriptorsTask<TSource extends WithFaceLandmarks<Wit
}
export class ComputeSingleFaceDescriptorTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends ComputeFaceDescriptorsTaskBase<WithFaceDescriptor<TSource> | undefined, TSource | undefined> {
public async run(): Promise<WithFaceDescriptor<TSource> | undefined> {
public override async run(): Promise<WithFaceDescriptor<TSource> | undefined> {
const parentResult = await this.parentTask;
if (!parentResult) {
return undefined;
}
if (!parentResult) return undefined;
const descriptor = await extractSingleFaceAndComputeResult<TSource, Float32Array>(
parentResult,
this.input,
(face) => nets.faceRecognitionNet.computeFaceDescriptor(face) as Promise<Float32Array>,
null,
// eslint-disable-next-line no-shadow
// eslint-disable-next-line no-shadow, @typescript-eslint/no-shadow
(parentResult) => parentResult.landmarks.align(null, { useDlibAlignment: true }),
);
return extendWithFaceDescriptor(parentResult, descriptor);
}

View File

@ -33,17 +33,18 @@ export class DetectFaceLandmarksTaskBase<TReturn, TParentReturn> extends Composa
}
export class DetectAllFaceLandmarksTask<TSource extends WithFaceDetection<{}>> extends DetectFaceLandmarksTaskBase<WithFaceLandmarks<TSource>[], TSource[]> {
public async run(): Promise<WithFaceLandmarks<TSource>[]> {
public override async run(): Promise<WithFaceLandmarks<TSource>[]> {
const parentResults = await this.parentTask;
const detections = parentResults.map((res) => res.detection);
const faces: Array<HTMLCanvasElement | tf.Tensor3D> = this.input instanceof tf.Tensor
? await extractFaceTensors(this.input, detections)
: await extractFaces(this.input, detections);
const faceLandmarksByFace = await Promise.all(
faces.map((face) => this.landmarkNet.detectLandmarks(face)),
) as FaceLandmarks68[];
const faceLandmarksByFace = await Promise.all(faces.map((face) => this.landmarkNet.detectLandmarks(face))) as FaceLandmarks68[];
faces.forEach((f) => f instanceof tf.Tensor && f.dispose());
return parentResults.map((parentResult, i) => extendWithFaceLandmarks<TSource>(parentResult, faceLandmarksByFace[i]));
const result = parentResults
.filter((_parentResult, i) => faceLandmarksByFace[i])
.map((parentResult, i) => extendWithFaceLandmarks<TSource>(parentResult, faceLandmarksByFace[i]));
return result;
}
withFaceExpressions() {
@ -60,7 +61,7 @@ export class DetectAllFaceLandmarksTask<TSource extends WithFaceDetection<{}>> e
}
export class DetectSingleFaceLandmarksTask<TSource extends WithFaceDetection<{}>> extends DetectFaceLandmarksTaskBase<WithFaceLandmarks<TSource> | undefined, TSource | undefined> {
public async run(): Promise<WithFaceLandmarks<TSource> | undefined> {
public override async run(): Promise<WithFaceLandmarks<TSource> | undefined> {
const parentResult = await this.parentTask;
if (!parentResult) {
return undefined;

View File

@ -20,7 +20,7 @@ export class DetectFacesTaskBase<TReturn> extends ComposableTask<TReturn> {
}
export class DetectAllFacesTask extends DetectFacesTaskBase<FaceDetection[]> {
public async run(): Promise<FaceDetection[]> {
public override async run(): Promise<FaceDetection[]> {
const { input, options } = this;
let result;
if (options instanceof TinyFaceDetectorOptions) result = nets.tinyFaceDetector.locateFaces(input, options);
@ -62,7 +62,7 @@ export class DetectAllFacesTask extends DetectFacesTaskBase<FaceDetection[]> {
}
export class DetectSingleFaceTask extends DetectFacesTaskBase<FaceDetection | undefined> {
public async run(): Promise<FaceDetection | undefined> {
public override async run(): Promise<FaceDetection | undefined> {
const faceDetections = await new DetectAllFacesTask(this.input, this.options);
let faceDetectionWithHighestScore = faceDetections[0];
faceDetections.forEach((faceDetection) => {

View File

@ -4,38 +4,19 @@ import { euclideanDistance } from '../euclideanDistance';
import { WithFaceDescriptor } from '../factories/index';
export class FaceMatcher {
private _labeledDescriptors: LabeledFaceDescriptors[]
private _labeledDescriptors: LabeledFaceDescriptors[];
private _distanceThreshold: number;
private _distanceThreshold: number
constructor(
inputs: LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array | Array<LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array>,
distanceThreshold = 0.6,
) {
constructor(inputs: LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array | Array<LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array>, distanceThreshold = 0.6) {
this._distanceThreshold = distanceThreshold;
const inputArray = Array.isArray(inputs) ? inputs : [inputs];
if (!inputArray.length) {
throw new Error('FaceRecognizer.constructor - expected atleast one input');
}
if (!inputArray.length) throw new Error('FaceRecognizer.constructor - expected atleast one input');
let count = 1;
const createUniqueLabel = () => `person ${count++}`;
this._labeledDescriptors = inputArray.map((desc) => {
if (desc instanceof LabeledFaceDescriptors) {
return desc;
}
if (desc instanceof Float32Array) {
return new LabeledFaceDescriptors(createUniqueLabel(), [desc]);
}
if (desc.descriptor && desc.descriptor instanceof Float32Array) {
return new LabeledFaceDescriptors(createUniqueLabel(), [desc.descriptor]);
}
if (desc instanceof LabeledFaceDescriptors) return desc;
if (desc instanceof Float32Array) return new LabeledFaceDescriptors(createUniqueLabel(), [desc]);
if (desc.descriptor && desc.descriptor instanceof Float32Array) return new LabeledFaceDescriptors(createUniqueLabel(), [desc.descriptor]);
throw new Error('FaceRecognizer.constructor - expected inputs to be of type LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array | Array<LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array>');
});
}
@ -47,36 +28,29 @@ export class FaceMatcher {
public computeMeanDistance(queryDescriptor: Float32Array, descriptors: Float32Array[]): number {
return descriptors
.map((d) => euclideanDistance(d, queryDescriptor))
.reduce((d1, d2) => d1 + d2, 0)
/ (descriptors.length || 1);
.reduce((d1, d2) => d1 + d2, 0) / (descriptors.length || 1);
}
public matchDescriptor(queryDescriptor: Float32Array): FaceMatch {
return this.labeledDescriptors
.map(({ descriptors, label }) => new FaceMatch(
label,
this.computeMeanDistance(queryDescriptor, descriptors),
))
.map(({ descriptors, label }) => new FaceMatch(label, this.computeMeanDistance(queryDescriptor, descriptors)))
.reduce((best, curr) => (best.distance < curr.distance ? best : curr));
}
public findBestMatch(queryDescriptor: Float32Array): FaceMatch {
const bestMatch = this.matchDescriptor(queryDescriptor);
return bestMatch.distance < this.distanceThreshold
? bestMatch
: new FaceMatch('unknown', bestMatch.distance);
return (bestMatch.distance < this._distanceThreshold) ? bestMatch : new FaceMatch('unknown', bestMatch.distance);
}
public toJSON(): any {
return {
distanceThreshold: this.distanceThreshold,
labeledDescriptors: this.labeledDescriptors.map((ld) => ld.toJSON()),
distanceThreshold: this._distanceThreshold,
labeledDescriptors: this._labeledDescriptors.map((ld) => ld.toJSON()),
};
}
public static fromJSON(json: any): FaceMatcher {
const labeledDescriptors = json.labeledDescriptors
.map((ld: any) => LabeledFaceDescriptors.fromJSON(ld));
const labeledDescriptors = json.labeledDescriptors.map((ld: any) => LabeledFaceDescriptors.fromJSON(ld));
return new FaceMatcher(labeledDescriptors, json.distanceThreshold);
}
}
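A hedged usage sketch for the class above, with placeholder descriptors standing in for values normally produced by computeFaceDescriptor:
// Example (not part of the diff): matching a query descriptor against labeled references
import * as faceapi from '@vladmandic/face-api';
const known = [
new faceapi.LabeledFaceDescriptors('alice', [new Float32Array(128)]), // placeholder descriptors
new faceapi.LabeledFaceDescriptors('bob', [new Float32Array(128)]),
];
const matcher = new faceapi.FaceMatcher(known, 0.6); // 0.6 is the default distance threshold
const query = new Float32Array(128); // normally the output of computeFaceDescriptor()
const best = matcher.findBestMatch(query);
console.log(best.label, best.distance); // label is 'unknown' when no match is within the threshold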

View File

@ -27,7 +27,7 @@ export class PredictAgeAndGenderTaskBase<TReturn, TParentReturn> extends Composa
}
export class PredictAllAgeAndGenderTask<TSource extends WithFaceDetection<{}>> extends PredictAgeAndGenderTaskBase<WithAge<WithGender<TSource>>[], TSource[]> {
public async run(): Promise<WithAge<WithGender<TSource>>[]> {
public override async run(): Promise<WithAge<WithGender<TSource>>[]> {
const parentResults = await this.parentTask;
const ageAndGenderByFace = await extractAllFacesAndComputeResults<TSource, AgeAndGenderPrediction[]>(
parentResults,
@ -47,7 +47,7 @@ export class PredictAllAgeAndGenderTask<TSource extends WithFaceDetection<{}>> e
}
export class PredictSingleAgeAndGenderTask<TSource extends WithFaceDetection<{}>> extends PredictAgeAndGenderTaskBase<WithAge<WithGender<TSource>> | undefined, TSource | undefined> {
public async run(): Promise<WithAge<WithGender<TSource>> | undefined> {
public override async run(): Promise<WithAge<WithGender<TSource>> | undefined> {
const parentResult = await this.parentTask;
if (!parentResult) return undefined;
const { age, gender, genderProbability } = await extractSingleFaceAndComputeResult<TSource, AgeAndGenderPrediction>(
@ -65,7 +65,7 @@ export class PredictSingleAgeAndGenderTask<TSource extends WithFaceDetection<{}>
}
export class PredictAllAgeAndGenderWithFaceAlignmentTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends PredictAllAgeAndGenderTask<TSource> {
withFaceExpressions() {
override withFaceExpressions() {
return new PredictAllFaceExpressionsWithFaceAlignmentTask(this, this.input);
}
@ -75,7 +75,7 @@ export class PredictAllAgeAndGenderWithFaceAlignmentTask<TSource extends WithFac
}
export class PredictSingleAgeAndGenderWithFaceAlignmentTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends PredictSingleAgeAndGenderTask<TSource> {
withFaceExpressions() {
override withFaceExpressions() {
return new PredictSingleFaceExpressionsWithFaceAlignmentTask(this, this.input);
}

View File

@ -26,7 +26,7 @@ export class PredictFaceExpressionsTaskBase<TReturn, TParentReturn> extends Comp
}
export class PredictAllFaceExpressionsTask<TSource extends WithFaceDetection<{}>> extends PredictFaceExpressionsTaskBase<WithFaceExpressions<TSource>[], TSource[]> {
public async run(): Promise<WithFaceExpressions<TSource>[]> {
public override async run(): Promise<WithFaceExpressions<TSource>[]> {
const parentResults = await this.parentTask;
const faceExpressionsByFace = await extractAllFacesAndComputeResults<TSource, FaceExpressions[]>(
@ -49,7 +49,7 @@ export class PredictAllFaceExpressionsTask<TSource extends WithFaceDetection<{}>
}
export class PredictSingleFaceExpressionsTask<TSource extends WithFaceDetection<{}>> extends PredictFaceExpressionsTaskBase<WithFaceExpressions<TSource> | undefined, TSource | undefined> {
public async run(): Promise<WithFaceExpressions<TSource> | undefined> {
public override async run(): Promise<WithFaceExpressions<TSource> | undefined> {
const parentResult = await this.parentTask;
if (!parentResult) {
return undefined;
@ -71,7 +71,7 @@ export class PredictSingleFaceExpressionsTask<TSource extends WithFaceDetection<
}
export class PredictAllFaceExpressionsWithFaceAlignmentTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends PredictAllFaceExpressionsTask<TSource> {
withAgeAndGender() {
override withAgeAndGender() {
return new PredictAllAgeAndGenderWithFaceAlignmentTask(this, this.input);
}
@ -81,7 +81,7 @@ export class PredictAllFaceExpressionsWithFaceAlignmentTask<TSource extends With
}
export class PredictSingleFaceExpressionsWithFaceAlignmentTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends PredictSingleFaceExpressionsTask<TSource> {
withAgeAndGender() {
override withAgeAndGender() {
return new PredictSingleAgeAndGenderWithFaceAlignmentTask(this, this.input);
}

View File

@ -22,6 +22,7 @@ export * from './euclideanDistance';
export * from './NeuralNetwork';
export * from './resizeResults';
const node = (typeof process !== 'undefined');
const browser = (typeof navigator !== 'undefined') && (typeof navigator.userAgent !== 'undefined');
export const version = { faceapi: pkg.version as string, node, browser };
export const version = pkg.version as string;
// set webgl defaults
// if (browser) tf.ENV.set('WEBGL_USE_SHAPES_UNIFORMS', true);

View File

@ -8,40 +8,24 @@ import * as tf from '../../dist/tfjs.esm';
* both sides of the minor dimension of the image.
* @returns The padded tensor with width === height.
*/
export function padToSquare(
imgTensor: tf.Tensor4D,
isCenterImage = false,
): tf.Tensor4D {
export function padToSquare(imgTensor: tf.Tensor4D, isCenterImage = false): tf.Tensor4D {
return tf.tidy(() => {
const [height, width] = imgTensor.shape.slice(1);
if (height === width) {
return imgTensor;
}
if (height === width) return imgTensor;
const dimDiff = Math.abs(height - width);
const paddingAmount = Math.round(dimDiff * (isCenterImage ? 0.5 : 1));
const paddingAxis = height > width ? 2 : 1;
const createPaddingTensor = (paddingAmountLocal: number): tf.Tensor => {
const paddingTensorShape = imgTensor.shape.slice();
paddingTensorShape[paddingAxis] = paddingAmountLocal;
return tf.fill(paddingTensorShape, 0, 'float32');
};
const paddingTensorAppend = createPaddingTensor(paddingAmount);
const remainingPaddingAmount = dimDiff - (paddingTensorAppend.shape[paddingAxis] as number);
const paddingTensorPrepend = isCenterImage && remainingPaddingAmount
? createPaddingTensor(remainingPaddingAmount)
: null;
const tensorsToStack = [
paddingTensorPrepend,
imgTensor,
paddingTensorAppend,
]
const paddingTensorPrepend = isCenterImage && remainingPaddingAmount ? createPaddingTensor(remainingPaddingAmount) : null;
const tensorsToStack = [paddingTensorPrepend, imgTensor, paddingTensorAppend]
.filter((t) => !!t)
.map((t: tf.Tensor) => tf.cast(t, 'float32')) as tf.Tensor4D[];
.map((t) => tf.cast(t as tf.Tensor4D, 'float32')) as tf.Tensor4D[];
return tf.concat(tensorsToStack, paddingAxis);
});
}
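The reshuffled function above zero-pads the smaller dimension so that width equals height before resizing. A standalone sketch of the same idea using plain tfjs (not the library function itself), matching the isCenterImage = false behavior:
// Example (not part of the diff): zero-padding a batched image tensor to a square
import * as tf from '@tensorflow/tfjs';
function padBatchToSquare(img: tf.Tensor4D): tf.Tensor4D {
return tf.tidy(() => {
const [, height, width] = img.shape;
if (height === width) return img;
const diff = Math.abs(height - width);
// pad only the trailing edge of the smaller dimension (equivalent to isCenterImage = false)
const paddings: Array<[number, number]> = height > width
? [[0, 0], [0, 0], [0, diff], [0, 0]]
: [[0, 0], [0, diff], [0, 0], [0, 0]];
return tf.pad(img, paddings);
});
}
console.log(padBatchToSquare(tf.zeros([1, 100, 80, 3]) as tf.Tensor4D).shape); // [1, 100, 100, 3]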

View File

@ -4,11 +4,11 @@ export interface ISsdMobilenetv1Options {
}
export class SsdMobilenetv1Options {
protected _name = 'SsdMobilenetv1Options'
protected _name = 'SsdMobilenetv1Options';
private _minConfidence: number
private _minConfidence: number;
private _maxResults: number
private _maxResults: number;
constructor({ minConfidence, maxResults }: ISsdMobilenetv1Options = {}) {
this._minConfidence = minConfidence || 0.5;

View File

@ -1,7 +1,15 @@
/* eslint-disable import/no-extraneous-dependencies */
/* eslint-disable node/no-unpublished-import */
// wrapper to load tfjs in a single place so version can be changed quickly
/**
* Creates tfjs bundle used by Human browser build target
* @external
*/
// export all from build bundle
export * from '@tensorflow/tfjs/dist/index.js';
export * from '@tensorflow/tfjs-backend-wasm';
export * from '@tensorflow/tfjs-backend-webgl/dist/index.js';
export * from '@tensorflow/tfjs-backend-wasm/dist/index.js';
// add webgpu to bundle, experimental
// export * from '@tensorflow/tfjs-backend-webgpu/dist/index.js';
// export versions, overrides version object from @tensorflow/tfjs
export { version } from '../../dist/tfjs.version.js';

Some files were not shown because too many files have changed in this diff.