Compare commits

..

No commits in common. "master" and "1.3.1" have entirely different histories.

658 changed files with 235842 additions and 21278 deletions

View File

@ -1,148 +0,0 @@
{
"log": {
"enabled": false,
"debug": false,
"console": true,
"output": "build.log"
},
"profiles": {
"production": ["compile", "typings", "typedoc", "lint", "changelog"],
"development": ["serve", "watch", "compile"]
},
"clean": {
"locations": ["dist/*", "typedoc/*", "types/lib/src"]
},
"lint": {
"locations": [ "src/" ],
"rules": { }
},
"changelog": {
"log": "CHANGELOG.md"
},
"serve": {
"sslKey": "cert/https.key",
"sslCrt": "cert/https.crt",
"httpPort": 8000,
"httpsPort": 8001,
"documentRoot": ".",
"defaultFolder": "demo",
"defaultFile": "index.html"
},
"build": {
"global": {
"target": "es2018",
"treeShaking": true,
"ignoreAnnotations": true,
"sourcemap": false,
      "banner": { "js": "/*\n  Face-API\n  homepage: <https://github.com/vladmandic/face-api>\n  author: <https://github.com/vladmandic>\n*/\n" }
},
"targets": [
{
"name": "tfjs/browser/tf-version",
"platform": "browser",
"format": "esm",
"input": "src/tfjs/tf-version.ts",
"output": "dist/tfjs.version.js"
},
{
"name": "tfjs/node/cpu",
"platform": "node",
"format": "cjs",
"input": "src/tfjs/tf-node.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/node/cpu",
"platform": "node",
"format": "cjs",
"input": "src/index.ts",
"output": "dist/face-api.node.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/node/gpu",
"platform": "node",
"format": "cjs",
"input": "src/tfjs/tf-node-gpu.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/node/gpu",
"platform": "node",
"format": "cjs",
"input": "src/index.ts",
"output": "dist/face-api.node-gpu.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/node/wasm",
"platform": "node",
"format": "cjs",
"input": "src/tfjs/tf-node-wasm.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/node/wasm",
"platform": "node",
"format": "cjs",
"input": "src/index.ts",
"output": "dist/face-api.node-wasm.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/browser/esm/nobundle",
"platform": "browser",
"format": "esm",
"input": "src/tfjs/tf-browser.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/browser/esm/nobundle",
"platform": "browser",
"format": "esm",
"input": "src/index.ts",
"output": "dist/face-api.esm-nobundle.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/browser/esm/bundle",
"platform": "browser",
"format": "esm",
"input": "src/tfjs/tf-browser.ts",
"output": "dist/tfjs.esm.js"
},
{
"name": "faceapi/browser/iife/bundle",
"platform": "browser",
"format": "iife",
"globalName": "faceapi",
"minify": true,
"input": "src/index.ts",
"output": "dist/face-api.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/browser/esm/bundle",
"platform": "browser",
"format": "esm",
"sourcemap": true,
"input": "src/index.ts",
"output": "dist/face-api.esm.js",
"typings": "types/lib",
"typedoc": "typedoc",
"external": ["@tensorflow"]
}
]
},
"watch": {
"enabled": true,
"locations": [ "src/**" ]
},
"typescript": {
"allowJs": false
}
}

View File

@ -7,7 +7,7 @@
"es2020": true "es2020": true
}, },
"parser": "@typescript-eslint/parser", "parser": "@typescript-eslint/parser",
"parserOptions": { "ecmaVersion": "latest" }, "parserOptions": { "ecmaVersion": 2020 },
"plugins": [ "plugins": [
"@typescript-eslint" "@typescript-eslint"
], ],
@ -17,6 +17,7 @@
"plugin:import/warnings", "plugin:import/warnings",
"plugin:node/recommended", "plugin:node/recommended",
"plugin:promise/recommended", "plugin:promise/recommended",
"plugin:json/recommended-with-comments",
"plugin:@typescript-eslint/eslint-recommended", "plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended", "plugin:@typescript-eslint/recommended",
"airbnb-base" "airbnb-base"
@ -28,11 +29,8 @@
"@typescript-eslint/ban-ts-comment": "off", "@typescript-eslint/ban-ts-comment": "off",
"@typescript-eslint/explicit-module-boundary-types": "off", "@typescript-eslint/explicit-module-boundary-types": "off",
"@typescript-eslint/no-var-requires": "off", "@typescript-eslint/no-var-requires": "off",
"@typescript-eslint/no-empty-object-type": "off",
"@typescript-eslint/no-require-imports": "off",
"camelcase": "off", "camelcase": "off",
"class-methods-use-this": "off", "class-methods-use-this": "off",
"default-param-last": "off",
"dot-notation": "off", "dot-notation": "off",
"func-names": "off", "func-names": "off",
"guard-for-in": "off", "guard-for-in": "off",
@ -58,7 +56,6 @@
"no-restricted-syntax": "off", "no-restricted-syntax": "off",
"no-return-assign": "off", "no-return-assign": "off",
"no-underscore-dangle": "off", "no-underscore-dangle": "off",
"no-promise-executor-return": "off",
"node/no-missing-import": ["error", { "tryExtensions": [".js", ".json", ".ts"] }], "node/no-missing-import": ["error", { "tryExtensions": [".js", ".json", ".ts"] }],
"node/no-unpublished-import": "off", "node/no-unpublished-import": "off",
"node/no-unpublished-require": "off", "node/no-unpublished-require": "off",

13
.github/FUNDING.yml vendored
View File

@ -1,13 +0,0 @@
# These are supported funding model platforms
github: [vladmandic]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

1
.gitignore vendored
View File

@ -1,2 +1,3 @@
node_modules node_modules
pnpm-lock.yaml pnpm-lock.yaml

View File

@ -2,4 +2,3 @@ node_modules
pnpm-lock.yaml pnpm-lock.yaml
typedoc typedoc
test test
types/lib

4
.npmrc
View File

@ -1,5 +1 @@
force = true force = true
production=true
legacy-peer-deps=true
strict-peer-dependencies=false
node-options='--no-deprecation'

View File

@ -1,3 +0,0 @@
{
"typescript.tsdk": "node_modules/typescript/lib"
}

View File

@ -1,168 +1,15 @@
# @vladmandic/face-api # @vladmandic/face-api
Version: **1.7.15** Version: **1.3.1**
Description: **FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS** Description: **FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS**
Author: **Vladimir Mandic <mandic00@live.com>** Author: **Vladimir Mandic <mandic00@live.com>**
License: **MIT** License: **MIT** </LICENSE>
Repository: **<https://github.com/vladmandic/face-api>** Repository: **<git+https://github.com/vladmandic/face-api.git>**
## Changelog ## Changelog
### **1.7.15** 2025/02/05 mandic00@live.com ### **HEAD -> master** 2021/06/08 mandic00@live.com
### **origin/master** 2024/09/10 mandic00@live.com
### **1.7.14** 2024/09/10 mandic00@live.com
- rebuild
- merge pull request #188 from rebser/master
- fixing leaking eventhandlers when using htmlcanvaselement
- rebuild types
- rebuild
### **1.7.13** 2024/01/17 mandic00@live.com
- merge pull request #186 from khwalkowicz/master
- feat: enable noimplicitany
### **release: 1.7.12** 2023/06/12 mandic00@live.com
### **1.7.12** 2023/06/12 mandic00@live.com
### **1.7.11** 2023/05/08 mandic00@live.com
### **1.7.10** 2023/03/21 mandic00@live.com
- change typedefs
### **1.7.9** 2023/01/29 mandic00@live.com
### **1.7.8** 2023/01/06 mandic00@live.com
### **1.7.7** 2022/12/01 mandic00@live.com
### **1.7.6** 2022/10/18 mandic00@live.com
- fix face angles (yaw, pitch, & roll) accuracy (#130)
### **1.7.5** 2022/10/09 mandic00@live.com
- create funding.yml
- add node-wasm demo
### **1.7.4** 2022/09/25 mandic00@live.com
- improve face compare performance
### **1.7.3** 2022/08/24 mandic00@live.com
- refresh release
### **1.7.2** 2022/08/23 mandic00@live.com
- document and remove optional dependencies
### **release: 1.7.1** 2022/07/25 mandic00@live.com
### **1.7.1** 2022/07/25 mandic00@live.com
- refactor dependencies
- full rebuild
### **1.6.11** 2022/05/24 mandic00@live.com
### **1.6.10** 2022/05/24 mandic00@live.com
### **1.6.9** 2022/05/18 mandic00@live.com
### **1.6.8** 2022/05/09 mandic00@live.com
- exclude impossible detected face boxes
### **1.6.7** 2022/04/01 mandic00@live.com
- fixed typo error (#97)
### **1.6.6** 2022/03/04 mandic00@live.com
### **1.6.5** 2022/02/07 mandic00@live.com
### **1.6.4** 2022/01/14 mandic00@live.com
- add node with wasm build target
### **1.6.3** 2022/01/06 mandic00@live.com
### **1.6.2** 2022/01/01 mandic00@live.com
### **1.6.1** 2021/12/09 mandic00@live.com
- rebuild
- release preview
- switch to custom tfjs and new typedefs
- rebuild
### **1.5.8** 2021/11/30 mandic00@live.com
### **1.5.7** 2021/10/28 mandic00@live.com
### **1.5.6** 2021/10/22 mandic00@live.com
### **release: 1.5.5** 2021/10/19 mandic00@live.com
### **1.5.5** 2021/10/19 mandic00@live.com
- allow backend change in demo via url params
- add node-match demo
- fix face matcher
### **1.5.4** 2021/09/29 mandic00@live.com
### **1.5.3** 2021/09/16 mandic00@live.com
- simplify tfjs imports
- reduce bundle size
- enable webgl uniforms
### **1.5.2** 2021/09/10 mandic00@live.com
- redesign build platform
### **1.5.1** 2021/09/08 mandic00@live.com
### **1.4.2** 2021/08/31 mandic00@live.com
### **release: 1.4.1** 2021/07/29 mandic00@live.com
### **1.4.1** 2021/07/29 mandic00@live.com
### **release: 1.3.1** 2021/06/18 mandic00@live.com
### **1.3.1** 2021/06/08 mandic00@live.com ### **1.3.1** 2021/06/08 mandic00@live.com
@ -257,61 +104,111 @@
- add badges - add badges
- optimize for npm - optimize for npm
- 0.30.6
### **0.30.6** 2021/03/08 mandic00@live.com
- added typings for face angle - added typings for face angle
- disable landmark printing - disable landmark printing
- 0.30.5
### **0.30.5** 2021/03/07 mandic00@live.com
- enabled live demo on gitpages - enabled live demo on gitpages
- 0.30.4
### **0.30.4** 2021/03/07 mandic00@live.com
- added face angle calculations - added face angle calculations
- added documentation - added documentation
- package update - package update
- 0.30.3
- 0.30.2 ### **0.30.3** 2021/03/04 mandic00@live.com
- 0.30.1
- 0.13.3
### **0.30.2** 2021/02/26 mandic00@live.com
### **0.30.1** 2021/02/25 mandic00@live.com
### **0.13.3** 2021/02/21 mandic00@live.com
- added note-cpu target - added note-cpu target
- merge pull request #39 from xemle/feature/node-cpu - merge pull request #39 from xemle/feature/node-cpu
- add node-cpu build for non supported systems of libtensorflow - add node-cpu build for non supported systems of libtensorflow
- 0.13.2
- 0.13.1 ### **0.13.2** 2021/02/20 mandic00@live.com
- 0.12.10
- exception handling
- 0.12.9 ### **0.13.1** 2021/02/20 mandic00@live.com
- exception handling
- 0.12.8
### **0.12.10** 2021/02/20 mandic00@live.com
- exception handling - exception handling
### **0.12.9** 2021/02/20 mandic00@live.com
### **0.12.8** 2021/02/20 mandic00@live.com
### **0.12.7** 2021/02/17 mandic00@live.com ### **0.12.7** 2021/02/17 mandic00@live.com
- 0.12.7 - 0.12.7
- 0.12.6
- 0.12.5 ### **0.12.6** 2021/02/13 mandic00@live.com
- 0.12.4
- 0.12.3
- 0.12.2 ### **0.12.5** 2021/02/12 mandic00@live.com
### **0.12.4** 2021/02/06 mandic00@live.com
### **0.12.3** 2021/02/06 mandic00@live.com
### **0.12.2** 2021/02/02 mandic00@live.com
### **update for tfjs 3.0.0** 2021/01/29 mandic00@live.com ### **update for tfjs 3.0.0** 2021/01/29 mandic00@live.com
- 0.12.1
### **0.12.1** 2021/01/29 mandic00@live.com
- rebuild - rebuild
- 0.11.6
### **0.11.6** 2021/01/24 mandic00@live.com
- add check for null face descriptor - add check for null face descriptor
- merge pull request #34 from patrickhulce/patch-1 - merge pull request #34 from patrickhulce/patch-1
- fix: return empty descriptor for zero-sized faces - fix: return empty descriptor for zero-sized faces
- 0.11.5
- 0.11.4 ### **0.11.5** 2021/01/22 mandic00@live.com
- 0.11.3
### **0.11.4** 2021/01/22 mandic00@live.com
### **0.11.3** 2021/01/20 mandic00@live.com
- fix typo - fix typo
- enable full minification - enable full minification
- 0.11.2
### **0.11.2** 2021/01/12 mandic00@live.com
- full rebuild - full rebuild
- 0.11.1
### **0.11.1** 2021/01/10 mandic00@live.com
- added live webcam demo - added live webcam demo
- 0.10.2
### **0.10.2** 2021/01/03 mandic00@live.com
- ts linting - ts linting
- version bump - version bump
- 0.10.1
### **0.10.1** 2020/12/23 mandic00@live.com
- full re-lint and typings generation - full re-lint and typings generation
- rebuild - rebuild

182
README.md
View File

@ -53,18 +53,18 @@ Example can be accessed directly using Git pages using URL:
### NodeJS ### NodeJS
NodeJS examples are: Three NodeJS examples are:
- `/demo/node-simple.js`:
Simplest possible NodeJS demo for FaceAPI in under 30 lines of JavaScript code
- `/demo/node.js`: - `/demo/node.js`:
Regular usage of `FaceAPI` from `NodeJS`
Using `TFJS` native methods to load images without external dependencies Using `TFJS` native methods to load images without external dependencies
- `/demo/node-canvas.js` and `/demo/node-image.js`: - `/demo/node-canvas.js`:
Regular usage of `FaceAPI` from `NodeJS`
Using external `canvas` module to load images Using external `canvas` module to load images
Which also allows for image drawing and saving inside `NodeJS` environment Which also allows for image drawing and saving inside `NodeJS` environment
- `/demo/node-match.js`: - `/demo/node-wasm.js`:
Simple demo that compares face similarity from a given image Same as `node-canvas`, but using `WASM` backend in `NodeJS` environment
to a second image or list of images in a folder Because why not :)
- `/demo/node-multiprocess.js`: - `/demo/node-multiprocess.js`:
Multiprocessing showcase that uses pool of worker processes Multiprocessing showcase that uses pool of worker processes
(`node-multiprocess-worker.js`) (`node-multiprocess-worker.js`)
@ -104,11 +104,8 @@ NodeJS examples are:
2021-03-14 08:42:09 STATE: Main: worker exit: 1888019 0 2021-03-14 08:42:09 STATE: Main: worker exit: 1888019 0
``` ```
### NodeJS Notes Note that `@tensorflow/tfjs-node` or `@tensorflow/tfjs-node-gpu`
- Supported NodeJS versions are **14** up to **22** must be installed before using NodeJS example
NodeJS version **23** and higher are not supported due to incompatibility with TensorFlow/JS
- `@tensorflow/tfjs-node` or `@tensorflow/tfjs-node-gpu`
must be installed before using any **NodeJS** examples
<br><hr><br> <br><hr><br>
@ -136,6 +133,8 @@ Simply include latest version of `FaceAPI` directly from a CDN in your HTML:
*without* TFJS pre-bundled *without* TFJS pre-bundled
- `dist/face-api.node-gpu.js`: CommonJS format for server-side NodeJS execution - `dist/face-api.node-gpu.js`: CommonJS format for server-side NodeJS execution
*without* TFJS pre-bundled and optimized for CUDA GPU acceleration *without* TFJS pre-bundled and optimized for CUDA GPU acceleration
- `dist/face-api.node-cpu.js`: CommonJS format for server-side NodeJS execution
*without* TFJS pre-bundled and using JS engine for platforms where tensorflow binary library version is not available
Defaults are: Defaults are:
@ -152,7 +151,7 @@ Bundled `TFJS` can be used directly via export: `faceapi.tf`
Reason for additional `nobundle` version is if you want to Reason for additional `nobundle` version is if you want to
include a specific version of TFJS and not rely on pre-packaged one include a specific version of TFJS and not rely on pre-packaged one
`FaceAPI` is compatible with TFJS 2.0+ and TFJS 3.0+ `FaceAPI` is compatible with TFJS 2.0+
All versions include `sourcemap` All versions include `sourcemap`
@ -261,7 +260,7 @@ If you want to GPU Accelerated execution in NodeJS, you must have CUDA libraries
Then install appropriate version of `FaceAPI`: Then install appropriate version of `FaceAPI`:
```shell ```shell
npm install @tensorflow/tfjs-node-gpu npm install @tensorflow/tfjs-node
npm install @vladmandic/face-api npm install @vladmandic/face-api
``` ```
@ -272,24 +271,18 @@ And then use with:
const faceapi = require('@vladmandic/face-api/dist/face-api.node-gpu.js'); // this loads face-api version with correct bindings for tfjs-node-gpu const faceapi = require('@vladmandic/face-api/dist/face-api.node-gpu.js'); // this loads face-api version with correct bindings for tfjs-node-gpu
``` ```
If you want to use `FaceAPI` in a NodeJS on platforms where **tensorflow** binary libraries are not supported, you can use NodeJS **WASM** backend. If you want to use `FaceAPI` in a NodeJS on platforms where NodeJS binary libraries are not supported, you can use JavaScript CPU backend.
```shell ```shell
npm install @tensorflow/tfjs npm install @tensorflow/tfjs
npm install @tensorflow/tfjs-backend-wasm
npm install @vladmandic/face-api npm install @vladmandic/face-api
``` ```
And then use with: And then use with:
```js ```js
const tf = require('@tensorflow/tfjs'); const tf = require('@tensorflow/tfjs')
const wasm = require('@tensorflow/tfjs-backend-wasm'); const faceapi = require('@vladmandic/face-api/dist/face-api.node-cpu.js');
const faceapi = require('@vladmandic/face-api/dist/face-api.node-wasm.js'); // use this when using face-api in dev mode
wasm.setWasmPaths('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/');
await tf.setBackend('wasm');
await tf.ready();
...
``` ```
If you want to use graphical functions inside NodeJS, If you want to use graphical functions inside NodeJS,
@ -317,14 +310,12 @@ faceapi.env.monkeyPatch({ Canvas, Image, ImageData })
## Weights ## Weights
Pretrained models and their weights are included in `./model`. Pretrained models and their weights are includes in `./model`.
<br><hr><br> <br><hr><br>
## Test & Dev Web Server ## Test & Dev Web Server
To install development dependencies, use `npm install --production=false`
Built-in test&dev web server can be started using Built-in test&dev web server can be started using
```shell ```shell
@ -336,47 +327,24 @@ By default it starts HTTP server on port 8000 and HTTPS server on port 8001 and
- <https://localhost:8001/demo/index.html> - <https://localhost:8001/demo/index.html>
- <https://localhost:8001/demo/webcam.html> - <https://localhost:8001/demo/webcam.html>
```js ```json
2022-01-14 09:56:19 INFO: @vladmandic/face-api version 1.6.4 2021-06-04 09:15:08 INFO: @vladmandic/face-api version 1.3.0
2022-01-14 09:56:19 INFO: User: vlado Platform: linux Arch: x64 Node: v17.2.0 2021-06-04 09:15:08 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0
2022-01-14 09:56:19 INFO: Application: { name: '@vladmandic/face-api', version: '1.6.4' } 2021-06-04 09:15:08 INFO: Build: file startup all target: es2018
2022-01-14 09:56:19 INFO: Environment: { profile: 'development', config: '.build.json', package: 'package.json', tsconfig: true, eslintrc: true, git: true } 2021-06-04 09:15:08 STATE: HTTP server listening: 8000
2022-01-14 09:56:19 INFO: Toolchain: { build: '0.6.7', esbuild: '0.14.11', typescript: '4.5.4', typedoc: '0.22.10', eslint: '8.6.0' } 2021-06-04 09:15:08 STATE: HTTP2 server listening: 8001
2022-01-14 09:56:19 INFO: Build: { profile: 'development', steps: [ 'serve', 'watch', 'compile' ] } 2021-06-04 09:15:08 STATE: Build for: node type: tfjs: { imports: 1, importBytes: 143, outputBytes: 1327, outputFiles: 'dist/tfjs.esm.js' }
2022-01-14 09:56:19 STATE: WebServer: { ssl: false, port: 8000, root: '.' } 2021-06-04 09:15:08 STATE: Monitoring: [ 'package.json', 'demo', 'src', [length]: 3 ]
2022-01-14 09:56:19 STATE: WebServer: { ssl: true, port: 8001, root: '.', sslKey: 'build/cert/https.key', sslCrt: 'build/cert/https.crt' } 2021-06-04 09:15:08 STATE: Build for: node type: node: { imports: 162, importBytes: 234251, outputBytes: 175089, outputFiles: 'dist/face-api.node.js' }
2022-01-14 09:56:19 STATE: Watch: { locations: [ 'src/**', 'README.md', 'src/**', 'src/**' ] } 2021-06-04 09:15:09 STATE: Build for: nodeGPU type: tfjs: { imports: 1, importBytes: 147, outputBytes: 1335, outputFiles: 'dist/tfjs.esm.js' }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/cpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 143, outputBytes: 1276 } 2021-06-04 09:15:09 STATE: Build for: nodeGPU type: node: { imports: 162, importBytes: 234259, outputBytes: 175097, outputFiles: 'dist/face-api.node-gpu.js' }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/cpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node.js', files: 162, inputBytes: 234787, outputBytes: 175203 } 2021-06-04 09:15:09 STATE: Build for: nodeCPU type: tfjs: { imports: 1, importBytes: 138, outputBytes: 1326, outputFiles: 'dist/tfjs.esm.js' }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/gpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-gpu.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 147, outputBytes: 1296 } 2021-06-04 09:15:09 STATE: Build for: nodeCPU type: node: { imports: 162, importBytes: 234250, outputBytes: 175088, outputFiles: 'dist/face-api.node-cpu.js' }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/gpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-gpu.js', files: 162, inputBytes: 234807, outputBytes: 175219 } 2021-06-04 09:15:09 STATE: Build for: browserNoBundle type: tfjs: { imports: 1, importBytes: 276, outputBytes: 277, outputFiles: 'dist/tfjs.esm.js' }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/wasm', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-wasm.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 185, outputBytes: 1367 } 2021-06-04 09:15:09 STATE: Build for: browserNoBundle type: esm: { imports: 162, importBytes: 233201, outputBytes: 168707, outputFiles: 'dist/face-api.esm-nobundle.js' }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/wasm', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-wasm.js', files: 162, inputBytes: 234878, outputBytes: 175294 } 2021-06-04 09:15:09 STATE: Build for: browserBundle type: tfjs: { modules: 1348, moduleBytes: 4323957, imports: 7, importBytes: 276, outputBytes: 2328203, outputFiles: 'dist/tfjs.esm.js' }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/tf-version', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-version.ts', output: 'dist/tfjs.version.js', files: 1, inputBytes: 1063, outputBytes: 1662 } 2021-06-04 09:15:10 STATE: Build for: browserBundle type: iife: { imports: 162, importBytes: 2561127, outputBytes: 2448241, outputFiles: 'dist/face-api.js' }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 2, inputBytes: 2172, outputBytes: 811 } 2021-06-04 09:15:11 STATE: Build for: browserBundle type: esm: { imports: 162, importBytes: 2561127, outputBytes: 2327046, outputFiles: 'dist/face-api.esm.js' }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm-nobundle.js', files: 162, inputBytes: 234322, outputBytes: 169437 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 11, inputBytes: 2172, outputBytes: 2444105 }
2022-01-14 09:56:20 STATE: Compile: { name: 'faceapi/browser/iife/bundle', format: 'iife', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.js', files: 162, inputBytes: 2677616, outputBytes: 1252572 }
2022-01-14 09:56:20 STATE: Compile: { name: 'faceapi/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm.js', files: 162, inputBytes: 2677616, outputBytes: 2435063 }
2022-01-14 09:56:20 INFO: Listening...
...
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/html', size: 1047, url: '/', remote: '::1' }
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/javascript', size: 6919, url: '/index.js', remote: '::1' }
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/javascript', size: 2435063, url: '/dist/face-api.esm.js', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 4125244, url: '/dist/face-api.esm.js.map', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 3219, url: '/model/tiny_face_detector_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 193321, url: '/model/tiny_face_detector_model.bin', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 28233, url: '/model/ssd_mobilenetv1_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 5616957, url: '/model/ssd_mobilenetv1_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 8392, url: '/model/age_gender_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 429708, url: '/model/age_gender_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 8485, url: '/model/face_landmark_68_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 356840, url: '/model/face_landmark_68_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 19615, url: '/model/face_recognition_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 6444032, url: '/model/face_recognition_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 6980, url: '/model/face_expression_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 329468, url: '/model/face_expression_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'image/jpeg', size: 144516, url: '/sample1.jpg', remote: '::1' }
``` ```
<br><hr><br> <br><hr><br>
@ -400,41 +368,38 @@ cd face-api
Then install all dependencies and run rebuild: Then install all dependencies and run rebuild:
```shell ```shell
npm install --production=false npm install
npm run build npm run build
``` ```
Build process uses `@vladmandic/build` module that creates optimized build for each target: Build process uses script `build.js` that creates optimized build for each target:
```js ```text
> @vladmandic/face-api@1.7.1 build /home/vlado/dev/face-api > @vladmandic/face-api@1.0.2 build
> node build.js > rimraf dist/* types/* typedoc/* && node server/build.js
```
2022-07-25 08:21:05 INFO: Application: { name: '@vladmandic/face-api', version: '1.7.1' } ```json
2022-07-25 08:21:05 INFO: Environment: { profile: 'production', config: '.build.json', package: 'package.json', tsconfig: true, eslintrc: true, git: true } 2021-06-04 09:13:42 INFO: @vladmandic/face-api version 1.3.0
2022-07-25 08:21:05 INFO: Toolchain: { build: '0.7.7', esbuild: '0.14.50', typescript: '4.7.4', typedoc: '0.23.9', eslint: '8.20.0' } 2021-06-04 09:13:42 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0
2022-07-25 08:21:05 INFO: Build: { profile: 'production', steps: [ 'clean', 'compile', 'typings', 'typedoc', 'lint', 'changelog' ] } 2021-06-04 09:13:42 INFO: Toolchain: tfjs: 3.7.0 esbuild 0.12.6; typescript 4.2.4; typedoc: 0.20.36 eslint: 7.27.0
2022-07-25 08:21:05 STATE: Clean: { locations: [ 'dist/*', 'typedoc/*', 'types/lib/src' ] } 2021-06-04 09:13:42 INFO: Build: file startup all target: es2018
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/cpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 143, outputBytes: 614 } 2021-06-04 09:13:42 STATE: Build for: node type: tfjs: { imports: 1, importBytes: 143, outputBytes: 1327, outputFiles: 'dist/tfjs.esm.js' }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/cpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node.js', files: 162, inputBytes: 234137, outputBytes: 85701 } 2021-06-04 09:13:42 STATE: Build for: node type: node: { imports: 162, importBytes: 234251, outputBytes: 175089, outputFiles: 'dist/face-api.node.js' }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/gpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-gpu.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 147, outputBytes: 618 } 2021-06-04 09:13:42 STATE: Build for: nodeGPU type: tfjs: { imports: 1, importBytes: 147, outputBytes: 1335, outputFiles: 'dist/tfjs.esm.js' }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/gpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-gpu.js', files: 162, inputBytes: 234141, outputBytes: 85705 } 2021-06-04 09:13:42 STATE: Build for: nodeGPU type: node: { imports: 162, importBytes: 234259, outputBytes: 175097, outputFiles: 'dist/face-api.node-gpu.js' }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/wasm', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-wasm.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 185, outputBytes: 670 } 2021-06-04 09:13:42 STATE: Build for: nodeCPU type: tfjs: { imports: 1, importBytes: 138, outputBytes: 1326, outputFiles: 'dist/tfjs.esm.js' }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/wasm', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-wasm.js', files: 162, inputBytes: 234193, outputBytes: 85755 } 2021-06-04 09:13:42 STATE: Build for: nodeCPU type: node: { imports: 162, importBytes: 234250, outputBytes: 175088, outputFiles: 'dist/face-api.node-cpu.js' }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/tf-version', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-version.ts', output: 'dist/tfjs.version.js', files: 1, inputBytes: 1063, outputBytes: 400 } 2021-06-04 09:13:42 STATE: Build for: browserNoBundle type: tfjs: { imports: 1, importBytes: 276, outputBytes: 277, outputFiles: 'dist/tfjs.esm.js' }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 2, inputBytes: 910, outputBytes: 527 } 2021-06-04 09:13:42 STATE: Build for: browserNoBundle type: esm: { imports: 162, importBytes: 233201, outputBytes: 168707, outputFiles: 'dist/face-api.esm-nobundle.js' }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm-nobundle.js', files: 162, inputBytes: 234050, outputBytes: 82787 } 2021-06-04 09:13:43 STATE: Build for: browserBundle type: tfjs: { modules: 1348, moduleBytes: 4323957, imports: 7, importBytes: 276, outputBytes: 2328203, outputFiles: 'dist/tfjs.esm.js' }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 11, inputBytes: 910, outputBytes: 1184871 } 2021-06-04 09:13:44 STATE: Build for: browserBundle type: iife: { imports: 162, importBytes: 2561127, outputBytes: 2448241, outputFiles: 'dist/face-api.js' }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/iife/bundle', format: 'iife', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.js', files: 162, inputBytes: 1418394, outputBytes: 1264631 } 2021-06-04 09:13:45 STATE: Build for: browserBundle type: esm: { imports: 162, importBytes: 2561127, outputBytes: 2327046, outputFiles: 'dist/face-api.esm.js' }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm.js', files: 162, inputBytes: 1418394, outputBytes: 1264150 } 2021-06-04 09:13:45 INFO: Running Linter: [ 'server/', 'demo/', 'src/', 'test/', [length]: 4 ]
2022-07-25 08:21:07 STATE: Typings: { input: 'src/index.ts', output: 'types/lib', files: 93 } 2021-06-04 09:14:10 INFO: Linter complete: files: 183 errors: 0 warnings: 0
2022-07-25 08:21:09 STATE: TypeDoc: { input: 'src/index.ts', output: 'typedoc', objects: 154, generated: true } 2021-06-04 09:14:10 INFO: Compile typings: [ 'src/index.ts', [length]: 1 ]
2022-07-25 08:21:13 STATE: Lint: { locations: [ 'src/' ], files: 174, errors: 0, warnings: 0 } 2021-06-04 09:14:15 INFO: Update Change log: [ '/home/vlado/dev/face-api/CHANGELOG.md', [length]: 1 ]
2022-07-25 08:21:14 STATE: ChangeLog: { repository: 'https://github.com/vladmandic/face-api', branch: 'master', output: 'CHANGELOG.md' } 2021-06-04 09:14:15 INFO: Generate TypeDocs: [ 'src/index.ts', [length]: 1 ]
2022-07-25 08:21:14 INFO: Done...
2022-07-25 08:21:14 STATE: Copy: { input: 'types/lib/dist/tfjs.esm.d.ts' }
2022-07-25 08:21:15 STATE: API-Extractor: { succeeeded: true, errors: 0, warnings: 417 }
2022-07-25 08:21:15 INFO: FaceAPI Build complete...
``` ```
<br><hr><br> <br><hr><br>
@ -449,14 +414,18 @@ Build process uses `@vladmandic/build` module that creates optimized build for e
## Note ## Note
This is updated **face-api.js** with latest available TensorFlow/JS as the original is not compatible with **tfjs >=2.0**. This is updated **face-api.js** with latest available TensorFlow/JS as the original is not compatible with **tfjs 2.0+**.
Forked from [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2** which was released on March 22nd, 2020 Forked from [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2** which was released on March 22nd, 2020
*Why?* I needed a FaceAPI that does not cause version conflict with newer versions of TensorFlow Currently based on **`TensorFlow/JS` 3.6.1**
And since the original FaceAPI was open-source, I've released this version as well
Changes ended up being too large for a simple pull request and it ended up being a full-fledged version on its own *Why?* I needed FaceAPI that does not cause version conflict with newer versions of TensorFlow
Plus many features were added since the original inception And since original FaceAPI was open-source, I've released this version as well
Changes ended up being too large for a simple pull request
and it ended up being a full-fledged version on its own
Plus many features were added since original inception
Although a lot of work has gone into this version of `FaceAPI` and it will continue to be maintained, Although a lot of work has gone into this version of `FaceAPI` and it will continue to be maintained,
at this time it is completely superseded by my newer library `Human` which covers the same use cases, at this time it is completely superseded by my newer library `Human` which covers the same use cases,
@ -471,18 +440,14 @@ but extends it with newer AI models, additional detection details, compatibility
Compared to [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2**: Compared to [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2**:
- Compatible with `TensorFlow/JS 2.0+, 3.0+ and 4.0+` - Compatible with `TensorFlow/JS 2.0+ & 3.0+`
Currently using **`TensorFlow/JS` 4.16**
Original `face-api.js` is based on `TFJS` **1.7.4**
- Compatible with `WebGL`, `CPU` and `WASM` TFJS Browser backends - Compatible with `WebGL`, `CPU` and `WASM` TFJS Browser backends
- Compatible with both `tfjs-node` and `tfjs-node-gpu` TFJS NodeJS backends - Compatible with both `tfjs-node` and `tfjs-node-gpu` TFJS NodeJS backends
- Updated all type castings for TypeScript type checking to `TypeScript 5.3` - Updated all type castings for TypeScript type checking to `TypeScript 4.3`
- Switched bundling from `UMD` to `ESM` + `CommonJS` with fallback to `IIFE` - Switched bundling from `UMD` to `ESM` + `CommonJS` with fallback to `IIFE`
Resulting code is optimized per-platform instead of being universal Resulting code is optimized per-platform instead of being universal
Fully tree shakable when imported as an `ESM` module Fully tree shakable when imported as an `ESM` module
Browser bundle process uses `ESBuild` instead of `Rollup` Browser bundle process uses `ESBuild` instead of `Rollup`
- Added separate `face-api` versions with `tfjs` pre-bundled and without `tfjs`
When using `-nobundle` version, user can load any version of `tfjs` manually
- Typescript build process now targets `ES2018` and instead of dual `ES5`/`ES6` - Typescript build process now targets `ES2018` and instead of dual `ES5`/`ES6`
Resulting code is clean ES2018 JavaScript without polyfills Resulting code is clean ES2018 JavaScript without polyfills
- Removed old tests, docs, examples - Removed old tests, docs, examples
@ -498,7 +463,6 @@ Compared to [face-api.js](https://github.com/justadudewhohacks/face-api.js) vers
- Added `face angle` calculations that returns `roll`, `yaw` and `pitch` - Added `face angle` calculations that returns `roll`, `yaw` and `pitch`
- Added `typedoc` automatic API specification generation during build - Added `typedoc` automatic generation during build
- Added `changelog` automatic generation during build - Added `changelog` automatic generation during build
- New process to generate **TypeDocs** bundle using API-Extractor
<br> <br>

View File

@ -1,38 +0,0 @@
{
"$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json",
"mainEntryPointFilePath": "types/lib/src/index.d.ts",
"bundledPackages": ["@tensorflow/tfjs-core", "@tensorflow/tfjs-converter", "@types/offscreencanvas"],
"compiler": {
"skipLibCheck": false
},
"newlineKind": "lf",
"dtsRollup": {
"enabled": true,
"untrimmedFilePath": "types/face-api.d.ts"
},
"docModel": { "enabled": false },
"tsdocMetadata": {
"enabled": false
},
"apiReport": { "enabled": false },
"messages": {
"compilerMessageReporting": {
"default": {
"logLevel": "warning"
}
},
"extractorMessageReporting": {
"default": {
"logLevel": "warning"
},
"ae-missing-release-tag": {
"logLevel": "none"
}
},
"tsdocMessageReporting": {
"default": {
"logLevel": "warning"
}
}
}
}

View File

@ -1,77 +0,0 @@
const fs = require('fs');
const log = require('@vladmandic/pilogger');
const Build = require('@vladmandic/build').Build;
const APIExtractor = require('@microsoft/api-extractor');
// Replacements applied by regExFile() to the rolled-up typedef file
// (types/face-api.d.ts, see main): rewrites triple-slash `types="..."`
// directives into local `path="..."` references under src/types/.
const regEx = [
  { search: 'types="@webgpu/types/dist"', replace: 'path="../src/types/webgpu.d.ts"' },
  { search: 'types="offscreencanvas"', replace: 'path="../src/types/offscreencanvas.d.ts"' },
];
/**
 * Copy a file from src to dst, logging the operation.
 * If src does not exist a warning is logged and nothing is written.
 */
function copyFile(src, dst) {
  const details = { input: src, output: dst };
  if (!fs.existsSync(src)) {
    log.warn('Copy:', details);
  } else {
    log.state('Copy:', details);
    fs.writeFileSync(dst, fs.readFileSync(src));
  }
}
/** Write text content to a destination file, logging the destination path. */
function writeFile(content, destination) {
  log.state('Write:', { output: destination });
  fs.writeFileSync(destination, content);
}
/**
 * Apply string search/replace filters to a text file in place.
 * For every line containing `entry.search`, only the first occurrence on that
 * line is replaced (String.replace semantics with a string pattern).
 * Line endings are normalized to LF when the file is rewritten.
 * Missing files are logged as a warning and skipped.
 * @param {string} src - path of the file to filter
 * @param {{search: string, replace: string}[]} entries - replacements to apply
 */
function regExFile(src, entries) {
  if (!fs.existsSync(src)) {
    log.warn('Filter:', { src });
    return;
  }
  log.state('Filter:', { input: src });
  if (entries.length === 0) return; // original never touched the file without entries; keep that
  // read once, apply every filter in memory, write once
  // (previously the file was fully re-read and re-written once per entry)
  let lines = fs.readFileSync(src, 'UTF-8').split(/\r?\n/);
  for (const entry of entries) {
    lines = lines.map((line) => line.replace(entry.search, entry.replace));
  }
  fs.writeFileSync(src, lines.join('\n'));
}
const apiIgnoreList = ['ae-forgotten-export', 'ae-unresolved-link', 'tsdoc-param-tag-missing-hyphen'];
/**
 * Build pipeline entry point:
 * 1. run the 'production' build profile
 * 2. copy the generated tfjs typedef next to the dist bundles
 * 3. run api-extractor to roll up typedefs into types/face-api.d.ts
 * 4. patch triple-slash references and emit one re-export .d.ts per bundle
 */
async function main() {
  // run production build
  const build = new Build();
  await build.run('production');
  // patch tfjs typedefs
  log.state('Copy:', { input: 'types/lib/dist/tfjs.esm.d.ts' }); // copyFile logs again below; kept for log parity
  copyFile('types/lib/dist/tfjs.esm.d.ts', 'dist/tfjs.esm.d.ts');
  // run api-extractor to create typedef rollup
  const extractorConfig = APIExtractor.ExtractorConfig.loadFileAndPrepare('api-extractor.json');
  const extractorResult = APIExtractor.Extractor.invoke(extractorConfig, {
    localBuild: true,
    showVerboseMessages: false,
    messageCallback: (msg) => {
      msg.handled = true;
      // drop low-severity messages, third-party sources, and explicitly ignored ids
      if (msg.logLevel === 'none' || msg.logLevel === 'verbose' || msg.logLevel === 'info') return;
      if (msg.sourceFilePath?.includes('/node_modules/')) return;
      if (apiIgnoreList.some((ignored) => msg.messageId.includes(ignored))) return;
      log.data('API', { level: msg.logLevel, category: msg.category, id: msg.messageId, file: msg.sourceFilePath, line: msg.sourceFileLine, text: msg.text });
    },
  });
  // fixed typo: the log key was previously spelled 'succeeeded'
  log.state('API-Extractor:', { succeeded: extractorResult.succeeded, errors: extractorResult.errorCount, warnings: extractorResult.warningCount });
  regExFile('types/face-api.d.ts', regEx);
  // every published bundle re-exports the single rolled-up typedef
  const bundles = ['face-api.esm-nobundle', 'face-api.esm', 'face-api', 'face-api.node', 'face-api.node-gpu', 'face-api.node-wasm'];
  for (const bundle of bundles) writeFile('export * from \'../types/face-api\';', `dist/${bundle}.d.ts`);
  log.info('FaceAPI Build complete...');
}
main();

View File

@ -11,7 +11,7 @@
<link rel="shortcut icon" href="../favicon.ico" type="image/x-icon"> <link rel="shortcut icon" href="../favicon.ico" type="image/x-icon">
<script src="./index.js" type="module"></script> <script src="./index.js" type="module"></script>
</head> </head>
<body style="font-family: monospace; background: black; color: white; font-size: 16px; line-height: 22px; margin: 0; overflow-x: hidden;"> <body style="font-family: monospace; background: black; color: white; font-size: 16px; line-height: 22px; margin: 0;">
<div id="log"></div> <div id="log"></div>
</body> </body>
</html> </html>

View File

@ -1,25 +1,25 @@
/** import * as faceapi from '../dist/face-api.esm.js';
* FaceAPI Demo for Browsers
* Loaded via `index.html`
*/
import * as faceapi from '../dist/face-api.esm.js'; // use when in dev mode
// import * as faceapi from '@vladmandic/face-api'; // use when downloading face-api as npm
// configuration options // configuration options
const modelPath = '../model/'; // path to model folder that will be loaded using http const modelPath = '../model/'; // path to model folder that will be loaded using http
// const modelPath = 'https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model/'; // path to model folder that will be loaded using http // const modelPath = 'https://vladmandic.github.io/face-api/model/'; // path to model folder that will be loaded using http
const imgSize = 800; // maximum image size in pixels const imgSize = 800; // maximum image size in pixels
const minScore = 0.3; // minimum score const minScore = 0.3; // minimum score
const maxResults = 10; // maximum number of results to return const maxResults = 10; // maximum number of results to return
const samples = ['sample1.jpg', 'sample2.jpg', 'sample3.jpg', 'sample4.jpg', 'sample5.jpg', 'sample6.jpg']; // sample images to be loaded using http const samples = ['sample1.jpg', 'sample2.jpg', 'sample3.jpg', 'sample4.jpg', 'sample5.jpg', 'sample6.jpg']; // sample images to be loaded using http
// helper function to pretty-print json object to string // helper function to pretty-print json object to string
const str = (json) => (json ? JSON.stringify(json).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ') : ''); function str(json) {
let text = '<font color="lightblue">';
text += json ? JSON.stringify(json).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ') : '';
text += '</font>';
return text;
}
// helper function to print strings to html document as a log // helper function to print strings to html document as a log
function log(...txt) { function log(...txt) {
console.log(...txt); // eslint-disable-line no-console // eslint-disable-next-line no-console
console.log(...txt);
const div = document.getElementById('log'); const div = document.getElementById('log');
if (div) div.innerHTML += `<br>${txt}`; if (div) div.innerHTML += `<br>${txt}`;
} }
@ -33,9 +33,11 @@ function faces(name, title, id, data) {
canvas.style.position = 'absolute'; canvas.style.position = 'absolute';
canvas.style.left = `${img.offsetLeft}px`; canvas.style.left = `${img.offsetLeft}px`;
canvas.style.top = `${img.offsetTop}px`; canvas.style.top = `${img.offsetTop}px`;
// @ts-ignore
canvas.width = img.width; canvas.width = img.width;
// @ts-ignore
canvas.height = img.height; canvas.height = img.height;
const ctx = canvas.getContext('2d', { willReadFrequently: true }); const ctx = canvas.getContext('2d');
if (!ctx) return; if (!ctx) return;
// draw title // draw title
ctx.font = '1rem sans-serif'; ctx.font = '1rem sans-serif';
@ -51,7 +53,6 @@ function faces(name, title, id, data) {
ctx.beginPath(); ctx.beginPath();
ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height); ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height);
ctx.stroke(); ctx.stroke();
// draw text labels
ctx.globalAlpha = 1; ctx.globalAlpha = 1;
ctx.fillText(`${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 18); ctx.fillText(`${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 18);
ctx.fillText(`${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 2); ctx.fillText(`${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 2);
@ -71,7 +72,8 @@ function faces(name, title, id, data) {
// helper function to draw processed image and its results // helper function to draw processed image and its results
function print(title, img, data) { function print(title, img, data) {
console.log('Results:', title, img, data); // eslint-disable-line no-console // eslint-disable-next-line no-console
console.log('Results:', title, img, data);
const el = new Image(); const el = new Image();
el.id = Math.floor(Math.random() * 100000).toString(); el.id = Math.floor(Math.random() * 100000).toString();
el.src = img; el.src = img;
@ -94,7 +96,7 @@ async function image(url) {
const canvas = document.createElement('canvas'); const canvas = document.createElement('canvas');
canvas.height = img.height; canvas.height = img.height;
canvas.width = img.width; canvas.width = img.width;
const ctx = canvas.getContext('2d', { willReadFrequently: true }); const ctx = canvas.getContext('2d');
if (ctx) ctx.drawImage(img, 0, 0, img.width, img.height); if (ctx) ctx.drawImage(img, 0, 0, img.width, img.height);
// return generated canvas to be used by tfjs during detection // return generated canvas to be used by tfjs during detection
resolve(canvas); resolve(canvas);
@ -109,23 +111,18 @@ async function main() {
log('FaceAPI Test'); log('FaceAPI Test');
// if you want to use wasm backend location for wasm binaries must be specified // if you want to use wasm backend location for wasm binaries must be specified
// await faceapi.tf?.setWasmPaths(`https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${faceapi.tf.version_core}/dist/`); // await faceapi.tf.setWasmPaths('../node_modules/@tensorflow/tfjs-backend-wasm/dist/');
// await faceapi.tf?.setBackend('wasm'); // await faceapi.tf.setBackend('wasm');
// log(`WASM SIMD: ${await faceapi.tf?.env().getAsync('WASM_HAS_SIMD_SUPPORT')} Threads: ${await faceapi.tf?.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT') ? 'Multi' : 'Single'}`);
// default is webgl backend // default is webgl backend
await faceapi.tf.setBackend('webgl'); await faceapi.tf.setBackend('webgl');
await faceapi.tf.ready();
// tfjs optimizations
if (faceapi.tf?.env().flagRegistry.CANVAS2D_WILL_READ_FREQUENTLY) faceapi.tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
await faceapi.tf.enableProdMode(); await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready(); await faceapi.tf.ready();
// check version // check version
log(`Version: FaceAPI ${str(faceapi?.version || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`); log(`Version: FaceAPI ${str(faceapi?.version.faceapi || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`);
log(`Flags: ${JSON.stringify(faceapi?.tf?.ENV.flags || { tf: 'not loaded' })}`); log(`Flags: ${JSON.stringify(faceapi?.tf?.ENV.flags || { tf: 'not loaded' })}`);
// load face-api models // load face-api models
@ -143,9 +140,16 @@ async function main() {
const engine = await faceapi.tf.engine(); const engine = await faceapi.tf.engine();
log(`TF Engine State: ${str(engine.state)}`); log(`TF Engine State: ${str(engine.state)}`);
// const testT = faceapi.tf.tensor([0]);
// const testF = testT.toFloat();
// console.log(testT.print(), testF.print());
// testT.dispose();
// testF.dispose();
// loop through all images and try to process them // loop through all images and try to process them
log(`Start processing: ${samples.length} images ...<br>`); log(`Start processing: ${samples.length} images ...<br>`);
for (const img of samples) { for (const img of samples) {
// new line
document.body.appendChild(document.createElement('br')); document.body.appendChild(document.createElement('br'));
// load and resize image // load and resize image
const canvas = await image(img); const canvas = await image(img);
@ -159,7 +163,7 @@ async function main() {
.withFaceDescriptors() .withFaceDescriptors()
.withAgeAndGender(); .withAgeAndGender();
// print results to screen // print results to screen
print('TinyFace:', img, dataTinyYolo); print('TinyFace Detector', img, dataTinyYolo);
// actual model execution // actual model execution
const dataSSDMobileNet = await faceapi const dataSSDMobileNet = await faceapi
.detectAllFaces(canvas, optionsSSDMobileNet) .detectAllFaces(canvas, optionsSSDMobileNet)
@ -168,9 +172,11 @@ async function main() {
.withFaceDescriptors() .withFaceDescriptors()
.withAgeAndGender(); .withAgeAndGender();
// print results to screen // print results to screen
print('SSDMobileNet:', img, dataSSDMobileNet); print('SSD MobileNet', img, dataSSDMobileNet);
} catch (err) { } catch (err) {
log(`Image: ${img} Error during processing ${str(err)}`); log(`Image: ${img} Error during processing ${str(err)}`);
// eslint-disable-next-line no-console
console.error(err);
} }
} }
} }

View File

@ -1,20 +1,15 @@
/** // @ts-nocheck
* FaceAPI Demo for NodeJS
* - Uses external library [canvas](https://www.npmjs.com/package/canvas) to decode image
* - Loads image from provided param
* - Outputs results to console
*/
// canvas library provides full canvas (load/draw/write) functionality for nodejs
// must be installed manually as it just a demo dependency and not actual face-api dependency
const canvas = require('canvas'); // eslint-disable-line node/no-missing-require
const fs = require('fs'); const fs = require('fs');
const path = require('path');
const process = require('process'); const process = require('process');
const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger'); const log = require('@vladmandic/pilogger');
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api // eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require, no-unused-vars, @typescript-eslint/no-unused-vars
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode const tf = require('@tensorflow/tfjs-node');
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases) // eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const canvas = require('canvas');
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
const modelPathRoot = '../model'; const modelPathRoot = '../model';
const imgPathRoot = './demo'; // modify to include your sample images const imgPathRoot = './demo'; // modify to include your sample images
@ -57,9 +52,11 @@ async function main() {
faceapi.env.monkeyPatch({ Canvas: canvas.Canvas, Image: canvas.Image, ImageData: canvas.ImageData }); faceapi.env.monkeyPatch({ Canvas: canvas.Canvas, Image: canvas.Image, ImageData: canvas.ImageData });
await faceapi.tf.setBackend('tensorflow'); await faceapi.tf.setBackend('tensorflow');
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready(); await faceapi.tf.ready();
log.state(`Version: FaceAPI ${faceapi.version} TensorFlow/JS ${tf.version_core} Backend: ${faceapi.tf?.getBackend()}`); log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf?.getBackend()}`);
log.info('Loading FaceAPI models'); log.info('Loading FaceAPI models');
const modelPath = path.join(__dirname, modelPathRoot); const modelPath = path.join(__dirname, modelPathRoot);
@ -83,7 +80,7 @@ async function main() {
for (const face of result) print(face); for (const face of result) print(face);
} }
const t1 = process.hrtime.bigint(); const t1 = process.hrtime.bigint();
log.info('Processed', numImages, 'images in', Math.trunc(Number((t1 - t0).toString()) / 1000 / 1000), 'ms'); log.info('Processed', numImages, 'images in', Math.trunc(parseInt(t1 - t0) / 1000 / 1000), 'ms');
} else { } else {
const param = process.argv[2]; const param = process.argv[2];
if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) { if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {

View File

@ -1,35 +0,0 @@
/**
* FaceAPI demo that loads two images and finds similarity most prominant face in each image
*/
const fs = require('fs');
const tf = require('@tensorflow/tfjs-node');
const faceapi = require('../dist/face-api.node');
let optionsSSDMobileNet;
/**
 * Detect all faces in an image file and return one face descriptor per face.
 * Reads the file from disk, decodes it into a 3-channel tensor, runs detection
 * with landmarks and descriptors, then disposes the tensor to free memory.
 */
async function getDescriptors(imageFile) {
  const imageBuffer = fs.readFileSync(imageFile);
  const imageTensor = tf.node.decodeImage(imageBuffer, 3);
  const detections = await faceapi
    .detectAllFaces(imageTensor, optionsSSDMobileNet)
    .withFaceLandmarks()
    .withFaceDescriptors();
  tf.dispose(imageTensor);
  const descriptors = [];
  for (const detection of detections) descriptors.push(detection.descriptor);
  return descriptors;
}
/**
 * Load detection/recognition models, compute descriptors for two input images
 * and print the euclidean distance / similarity between the first face found
 * in each image.
 */
async function main(file1, file2) {
  console.log('input images:', file1, file2); // eslint-disable-line no-console
  await tf.ready();
  // load required models and configure the detector
  await faceapi.nets.ssdMobilenetv1.loadFromDisk('model');
  optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.5, maxResults: 1 });
  await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
  await faceapi.nets.faceRecognitionNet.loadFromDisk('model');
  const desc1 = await getDescriptors(file1);
  const desc2 = await getDescriptors(file2);
  // only compare first found face in each image
  const distance = faceapi.euclideanDistance(desc1[0], desc2[0]);
  console.log('distance between most prominant detected faces:', distance); // eslint-disable-line no-console
  console.log('similarity between most prominant detected faces:', 1 - distance); // eslint-disable-line no-console
}
main('demo/sample1.jpg', 'demo/sample2.jpg');

View File

@ -1,18 +1,11 @@
/**
* FaceAPI Demo for NodeJS
* - Uses external library [@canvas/image](https://www.npmjs.com/package/@canvas/image) to decode image
* - Loads image from provided param
* - Outputs results to console
*/
// @canvas/image can decode jpeg, png, webp
// must be installed manually as it just a demo dependency and not actual face-api dependency
const image = require('@canvas/image'); // eslint-disable-line node/no-missing-require
const fs = require('fs'); const fs = require('fs');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const tf = require('@tensorflow/tfjs-node');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const image = require('@canvas/image'); // @canvas/image can decode jpeg, png, webp
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger'); const log = require('@vladmandic/pilogger');
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
const modelPath = 'model/'; const modelPath = 'model/';
const imageFile = 'demo/sample1.jpg'; const imageFile = 'demo/sample1.jpg';
@ -48,7 +41,7 @@ async function main() {
.withFaceExpressions() .withFaceExpressions()
.withFaceDescriptors() .withFaceDescriptors()
.withAgeAndGender(); .withAgeAndGender();
log.data('results:', result.length); log.data('results:', result);
} }
main(); main();

View File

@ -1,84 +0,0 @@
/**
* FaceAPI Demo for NodeJS
* - Analyzes face descriptors from source (image file or folder containing multiple image files)
* - Analyzes face descriptor from target
* - Finds best match
*/
const fs = require('fs');
const path = require('path');
const log = require('@vladmandic/pilogger');
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
// SSD-MobileNet detector options; created in initFaceAPI()
let optionsSSDMobileNet;
const minConfidence = 0.1; // minimum detection confidence passed to SsdMobilenetv1Options
const distanceThreshold = 0.5; // distance threshold passed to FaceMatcher
const modelPath = 'model'; // folder containing model weights
const labeledFaceDescriptors = []; // descriptors registered from source images, labeled by file path
/** Load all required FaceAPI models from disk and configure the detector options. */
async function initFaceAPI() {
  const nets = [
    faceapi.nets.ssdMobilenetv1,
    faceapi.nets.faceLandmark68Net,
    faceapi.nets.faceExpressionNet,
    faceapi.nets.faceRecognitionNet,
  ];
  for (const net of nets) await net.loadFromDisk(modelPath);
  optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults: 1 });
}
/**
 * Run face detection on an image file and return one descriptor per detected
 * face. The decoded tensor is disposed before returning to free tfjs memory.
 */
async function getDescriptors(imageFile) {
  const imageTensor = tf.node.decodeImage(fs.readFileSync(imageFile), 3);
  const detected = await faceapi
    .detectAllFaces(imageTensor, optionsSSDMobileNet)
    .withFaceLandmarks()
    .withFaceExpressions()
    .withFaceDescriptors();
  tf.dispose(imageTensor);
  return detected.map(({ descriptor }) => descriptor);
}
/**
 * Compute face descriptors for a single image file and add them to the global
 * registry, labeled with the file path. Non-image files are silently skipped.
 * Fixed: the extension check now uses path.extname (the old suffix test also
 * matched names like 'xjpg' with no dot) and additionally accepts '.jpeg'.
 */
async function registerImage(inputFile) {
  const supported = ['.jpg', '.jpeg', '.png', '.gif'];
  if (!supported.includes(path.extname(inputFile).toLowerCase())) return;
  log.data('Registered:', inputFile);
  const descriptors = await getDescriptors(inputFile);
  for (const descriptor of descriptors) {
    labeledFaceDescriptors.push(new faceapi.LabeledFaceDescriptors(inputFile, [descriptor]));
  }
}
/**
 * Match every face detected in inputFile against the registered descriptors.
 * Returns one best-match result per detected face.
 */
async function findBestMatch(inputFile) {
  const matcher = new faceapi.FaceMatcher(labeledFaceDescriptors, distanceThreshold);
  const descriptors = await getDescriptors(inputFile);
  const matches = [];
  for (let i = 0; i < descriptors.length; i += 1) {
    matches.push(await matcher.findBestMatch(descriptors[i]));
  }
  return matches;
}
/**
 * CLI entry point: registers face descriptors from a source image (or every
 * image inside a source folder), then finds the best match for each face in
 * the target image.
 * Usage: <source image or folder> <target image>
 */
async function main() {
  log.header();
  if (process.argv.length !== 4) {
    log.error(process.argv[1], 'Expected <source image or folder> <target image>');
    process.exit(1);
  }
  await initFaceAPI();
  const [, , source, target] = process.argv;
  log.info('Input:', source);
  const stats = fs.statSync(source);
  if (stats.isFile()) {
    await registerImage(source); // register image
  } else if (stats.isDirectory()) {
    // register all images in a folder
    for (const entry of fs.readdirSync(source)) await registerImage(path.join(source, entry));
  }
  log.info('Comparing:', target, 'Descriptors:', labeledFaceDescriptors.length);
  if (labeledFaceDescriptors.length === 0) {
    log.warn('No registered faces');
  } else {
    const bestMatch = await findBestMatch(target); // find best match to all registered images
    log.data('Match:', bestMatch);
  }
}
main();

View File

@ -1,16 +1,14 @@
/** // @ts-nocheck
* FaceAPI Demo for NodeJS
* - Used by `node-multiprocess.js`
*/
const fs = require('fs'); const fs = require('fs');
const path = require('path'); const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger'); const log = require('@vladmandic/pilogger');
// workers actual import tfjs and faceapi modules // workers actual import tfjs and faceapi modules
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api // eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode const tf = require('@tensorflow/tfjs-node');
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases) const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
// options used by faceapi // options used by faceapi
const modelPathRoot = '../model'; const modelPathRoot = '../model';
@ -55,7 +53,7 @@ async function main() {
await faceapi.tf.enableProdMode(); await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false); await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready(); await faceapi.tf.ready();
log.state('Worker: PID:', process.pid, `TensorFlow/JS ${faceapi.tf.version_core} FaceAPI ${faceapi.version} Backend: ${faceapi.tf.getBackend()}`); log.state('Worker: PID:', process.pid, `TensorFlow/JS ${faceapi.tf.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf.getBackend()}`);
// and load and initialize facepi models // and load and initialize facepi models
const modelPath = path.join(__dirname, modelPathRoot); const modelPath = path.join(__dirname, modelPathRoot);

View File

@ -1,14 +1,11 @@
/** // @ts-nocheck
* FaceAPI Demo for NodeJS
* - Starts multiple worker processes and uses them as worker pool to process all input images
* - Images are enumerated in main process and sent for processing to worker processes via ipc
*/
const fs = require('fs'); const fs = require('fs');
const path = require('path'); const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger'); // this is my simple logger with few extra features const log = require('@vladmandic/pilogger'); // this is my simple logger with few extra features
const child_process = require('child_process'); const child_process = require('child_process');
// note that main process does not need to import faceapi or tfjs at all as processing is done in a worker process // note that main process import faceapi or tfjs at all
const imgPathRoot = './demo'; // modify to include your sample images const imgPathRoot = './demo'; // modify to include your sample images
const numWorkers = 4; // how many workers will be started const numWorkers = 4; // how many workers will be started
@ -36,14 +33,14 @@ function waitCompletion() {
if (activeWorkers > 0) setImmediate(() => waitCompletion()); if (activeWorkers > 0) setImmediate(() => waitCompletion());
else { else {
t[1] = process.hrtime.bigint(); t[1] = process.hrtime.bigint();
log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(Number(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(Number(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(Number(t[1] - t[2]) / numImages / 1000000), 'ms'); log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(parseInt(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(parseInt(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(parseInt(t[1] - t[2]) / numImages / 1000000), 'ms');
} }
} }
function measureLatency() { function measureLatency() {
t[3] = process.hrtime.bigint(); t[3] = process.hrtime.bigint();
const latencyInitialization = Math.trunc(Number(t[2] - t[0]) / 1000 / 1000); const latencyInitialization = Math.trunc(parseInt(t[2] - t[0]) / 1000 / 1000);
const latencyRoundTrip = Math.trunc(Number(t[3] - t[2]) / 1000 / 1000); const latencyRoundTrip = Math.trunc(parseInt(t[3] - t[2]) / 1000 / 1000);
log.info('Latency: worker initializtion: ', latencyInitialization, 'message round trip:', latencyRoundTrip); log.info('Latency: worker initializtion: ', latencyInitialization, 'message round trip:', latencyRoundTrip);
} }

View File

@ -1,31 +0,0 @@
/**
* FaceAPI Demo for NodeJS
* - Loads image
* - Outputs results to console
*/
const fs = require('fs');
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
async function main() {
await faceapi.nets.ssdMobilenetv1.loadFromDisk('model'); // load models from a specific patch
await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
await faceapi.nets.ageGenderNet.loadFromDisk('model');
await faceapi.nets.faceRecognitionNet.loadFromDisk('model');
await faceapi.nets.faceExpressionNet.loadFromDisk('model');
const options = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.1, maxResults: 10 }); // set model options
const buffer = fs.readFileSync('demo/sample1.jpg'); // load jpg image as binary
const decodeT = faceapi.tf.node.decodeImage(buffer, 3); // decode binary buffer to rgb tensor
const expandT = faceapi.tf.expandDims(decodeT, 0); // add batch dimension to tensor
const result = await faceapi.detectAllFaces(expandT, options) // run detection
.withFaceLandmarks()
.withFaceExpressions()
.withFaceDescriptors()
.withAgeAndGender();
faceapi.tf.dispose([decodeT, expandT]); // dispose tensors to avoid memory leaks
console.log({ result }); // eslint-disable-line no-console
}
main();

View File

@ -1,53 +1,97 @@
/** // @ts-nocheck
* FaceAPI Demo for NodeJS using WASM
* - Loads WASM binaries from external CDN
* - Loads image
* - Outputs results to console
*/
const fs = require('fs'); const fs = require('fs');
const image = require('@canvas/image'); // eslint-disable-line node/no-missing-require const process = require('process');
const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require, no-unused-vars, @typescript-eslint/no-unused-vars
const tf = require('@tensorflow/tfjs'); const tf = require('@tensorflow/tfjs');
const wasm = require('@tensorflow/tfjs-backend-wasm'); // eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const faceapi = require('../dist/face-api.node-wasm.js'); // use this when using face-api in dev mode require('@tensorflow/tfjs-backend-wasm');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require, no-unused-vars
const canvas = require('canvas');
const faceapi = require('../dist/face-api.node-cpu.js'); // this is equivalent to '@vladmandic/faceapi'
async function readImage(imageFile) { const modelPathRoot = '../model';
const buffer = fs.readFileSync(imageFile); // read image from disk const imgPathRoot = './demo'; // modify to include your sample images
const canvas = await image.imageFromBuffer(buffer); // decode to canvas const minConfidence = 0.15;
const imageData = image.getImageData(canvas); // read decoded image data from canvas const maxResults = 5;
const tensor = tf.tidy(() => { // create tensor from image data let optionsSSDMobileNet;
const data = tf.tensor(Array.from(imageData?.data || []), [canvas.height, canvas.width, 4], 'int32'); // create rgba image tensor from flat array and flip to height x width
const channels = tf.split(data, 4, 2); // split rgba to channels async function image(input) {
const rgb = tf.stack([channels[0], channels[1], channels[2]], 2); // stack channels back to rgb const img = await canvas.loadImage(input);
const squeeze = tf.squeeze(rgb); // move extra dim from the end of tensor and use it as batch number instead const c = canvas.createCanvas(img.width, img.height);
return squeeze; const ctx = c.getContext('2d');
}); ctx.drawImage(img, 0, 0, img.width, img.height);
console.log(`Image: ${imageFile} [${canvas.width} x ${canvas.height} Tensor: ${tensor.shape}, Size: ${tensor.size}`); // eslint-disable-line no-console // const out = fs.createWriteStream('test.jpg');
return tensor; // const stream = c.createJPEGStream({ quality: 0.6, progressive: true, chromaSubsampling: true });
// stream.pipe(out);
return c;
} }
async function main() { async function detect(tensor) {
wasm.setWasmPaths('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/', true); const result = await faceapi
await tf.setBackend('wasm'); .detectAllFaces(tensor, optionsSSDMobileNet)
await tf.ready();
console.log(`Version: FaceAPI ${faceapi.version} TensorFlow/JS ${tf.version_core} Backend: ${faceapi.tf.getBackend()}`); // eslint-disable-line no-console
await faceapi.nets.ssdMobilenetv1.loadFromDisk('model'); // load models from a specific patch
await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
await faceapi.nets.ageGenderNet.loadFromDisk('model');
await faceapi.nets.faceRecognitionNet.loadFromDisk('model');
await faceapi.nets.faceExpressionNet.loadFromDisk('model');
const options = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.1, maxResults: 10 }); // set model options
const tensor = await readImage('demo/sample1.jpg');
const t0 = performance.now();
const result = await faceapi.detectAllFaces(tensor, options) // run detection
.withFaceLandmarks() .withFaceLandmarks()
.withFaceExpressions() .withFaceExpressions()
.withFaceDescriptors() .withFaceDescriptors()
.withAgeAndGender(); .withAgeAndGender();
tf.dispose(tensor); // dispose tensors to avoid memory leaks return result;
const t1 = performance.now(); }
console.log('Time', t1 - t0); // eslint-disable-line no-console
console.log('Result', result); // eslint-disable-line no-console function print(face) {
const expression = Object.entries(face.expressions).reduce((acc, val) => ((val[1] > acc[1]) ? val : acc), ['', 0]);
const box = [face.alignedRect._box._x, face.alignedRect._box._y, face.alignedRect._box._width, face.alignedRect._box._height];
const gender = `Gender: ${Math.round(100 * face.genderProbability)}% ${face.gender}`;
log.data(`Detection confidence: ${Math.round(100 * face.detection._score)}% ${gender} Age: ${Math.round(10 * face.age) / 10} Expression: ${Math.round(100 * expression[1])}% ${expression[0]} Box: ${box.map((a) => Math.round(a))}`);
}
async function main() {
log.header();
log.info('FaceAPI single-process test');
faceapi.env.monkeyPatch({ Canvas: canvas.Canvas, Image: canvas.Image, ImageData: canvas.ImageData });
await faceapi.tf.setBackend('wasm');
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready();
log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf?.getBackend()}`);
log.info('Loading FaceAPI models');
const modelPath = path.join(__dirname, modelPathRoot);
await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
await faceapi.nets.ageGenderNet.loadFromDisk(modelPath);
await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults });
if (process.argv.length !== 3) {
const t0 = process.hrtime.bigint();
const dir = fs.readdirSync(imgPathRoot);
let numImages = 0;
for (const img of dir) {
if (!img.toLocaleLowerCase().endsWith('.jpg')) continue;
numImages += 1;
const c = await image(path.join(imgPathRoot, img));
const result = await detect(c);
log.data('Image:', img, 'Detected faces:', result.length);
for (const face of result) print(face);
}
const t1 = process.hrtime.bigint();
log.info('Processed', numImages, 'images in', Math.trunc(parseInt(t1 - t0) / 1000 / 1000), 'ms');
} else {
const param = process.argv[2];
if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {
const c = await image(param);
const result = await detect(c);
log.data('Image:', param, 'Detected faces:', result.length);
for (const face of result) print(face);
}
}
} }
main(); main();

View File

@ -1,25 +1,21 @@
/** // @ts-nocheck
* FaceAPI Demo for NodeJS
* - Uses external library [node-fetch](https://www.npmjs.com/package/node-fetch) to load images via http
* - Loads image from provided param
* - Outputs results to console
*/
const fs = require('fs'); const fs = require('fs');
const process = require('process'); const process = require('process');
const path = require('path'); const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger'); const log = require('@vladmandic/pilogger');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api const fetch = require('node-fetch').default;
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode // eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases) const tf = require('@tensorflow/tfjs-node');
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
const modelPathRoot = '../model'; const modelPathRoot = '../model';
const imgPathRoot = './demo'; // modify to include your sample images const imgPathRoot = './demo'; // modify to include your sample images
const minConfidence = 0.15; const minConfidence = 0.15;
const maxResults = 5; const maxResults = 5;
let optionsSSDMobileNet; let optionsSSDMobileNet;
let fetch; // dynamically imported later
async function image(input) { async function image(input) {
// read input image file and create tensor to be used for processing // read input image file and create tensor to be used for processing
@ -93,13 +89,12 @@ async function main() {
log.header(); log.header();
log.info('FaceAPI single-process test'); log.info('FaceAPI single-process test');
// eslint-disable-next-line node/no-extraneous-import
fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-missing-import
await faceapi.tf.setBackend('tensorflow'); await faceapi.tf.setBackend('tensorflow');
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready(); await faceapi.tf.ready();
log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version} Backend: ${faceapi.tf?.getBackend()}`); log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf?.getBackend()}`);
log.info('Loading FaceAPI models'); log.info('Loading FaceAPI models');
const modelPath = path.join(__dirname, modelPathRoot); const modelPath = path.join(__dirname, modelPathRoot);
@ -110,7 +105,7 @@ async function main() {
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath); await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults }); optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults });
if (process.argv.length !== 4) { if (process.argv.length !== 3) {
const t0 = process.hrtime.bigint(); const t0 = process.hrtime.bigint();
const dir = fs.readdirSync(imgPathRoot); const dir = fs.readdirSync(imgPathRoot);
for (const img of dir) { for (const img of dir) {
@ -122,7 +117,7 @@ async function main() {
tensor.dispose(); tensor.dispose();
} }
const t1 = process.hrtime.bigint(); const t1 = process.hrtime.bigint();
log.info('Processed', dir.length, 'images in', Math.trunc(Number((t1 - t0)) / 1000 / 1000), 'ms'); log.info('Processed', dir.length, 'images in', Math.trunc(parseInt(t1 - t0) / 1000 / 1000), 'ms');
} else { } else {
const param = process.argv[2]; const param = process.argv[2];
if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) { if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {

View File

@ -1,14 +1,8 @@
/** import * as faceapi from '../dist/face-api.esm.js';
* FaceAPI Demo for Browsers
* Loaded via `webcam.html`
*/
import * as faceapi from '../dist/face-api.esm.js'; // use when in dev mode
// import * as faceapi from '@vladmandic/face-api'; // use when downloading face-api as npm
// configuration options // configuration options
const modelPath = '../model/'; // path to model folder that will be loaded using http const modelPath = '../model/'; // path to model folder that will be loaded using http
// const modelPath = 'https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model/'; // path to model folder that will be loaded using http // const modelPath = 'https://vladmandic.github.io/face-api/model/'; // path to model folder that will be loaded using http
const minScore = 0.2; // minimum score const minScore = 0.2; // minimum score
const maxResults = 5; // maximum number of results to return const maxResults = 5; // maximum number of results to return
let optionsSSDMobileNet; let optionsSSDMobileNet;
@ -23,14 +17,15 @@ function str(json) {
// helper function to print strings to html document as a log // helper function to print strings to html document as a log
function log(...txt) { function log(...txt) {
console.log(...txt); // eslint-disable-line no-console // eslint-disable-next-line no-console
console.log(...txt);
const div = document.getElementById('log'); const div = document.getElementById('log');
if (div) div.innerHTML += `<br>${txt}`; if (div) div.innerHTML += `<br>${txt}`;
} }
// helper function to draw detected faces // helper function to draw detected faces
function drawFaces(canvas, data, fps) { function drawFaces(canvas, data, fps) {
const ctx = canvas.getContext('2d', { willReadFrequently: true }); const ctx = canvas.getContext('2d');
if (!ctx) return; if (!ctx) return;
ctx.clearRect(0, 0, canvas.width, canvas.height); ctx.clearRect(0, 0, canvas.width, canvas.height);
// draw title // draw title
@ -47,18 +42,18 @@ function drawFaces(canvas, data, fps) {
ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height); ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height);
ctx.stroke(); ctx.stroke();
ctx.globalAlpha = 1; ctx.globalAlpha = 1;
// draw text labels // const expression = person.expressions.sort((a, b) => Object.values(a)[0] - Object.values(b)[0]);
const expression = Object.entries(person.expressions).sort((a, b) => b[1] - a[1]); const expression = Object.entries(person.expressions).sort((a, b) => b[1] - a[1]);
ctx.fillStyle = 'black'; ctx.fillStyle = 'black';
ctx.fillText(`gender: ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 59); ctx.fillText(`gender: ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 59);
ctx.fillText(`expression: ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 41); ctx.fillText(`expression: ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 41);
ctx.fillText(`age: ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 23); ctx.fillText(`age: ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 23);
ctx.fillText(`roll:${person.angle.roll}° pitch:${person.angle.pitch}° yaw:${person.angle.yaw}°`, person.detection.box.x, person.detection.box.y - 5); ctx.fillText(`roll:${person.angle.roll.toFixed(3)} pitch:${person.angle.pitch.toFixed(3)} yaw:${person.angle.yaw.toFixed(3)}`, person.detection.box.x, person.detection.box.y - 5);
ctx.fillStyle = 'lightblue'; ctx.fillStyle = 'lightblue';
ctx.fillText(`gender: ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 60); ctx.fillText(`gender: ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 60);
ctx.fillText(`expression: ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 42); ctx.fillText(`expression: ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 42);
ctx.fillText(`age: ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 24); ctx.fillText(`age: ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 24);
ctx.fillText(`roll:${person.angle.roll}° pitch:${person.angle.pitch}° yaw:${person.angle.yaw}°`, person.detection.box.x, person.detection.box.y - 6); ctx.fillText(`roll:${person.angle.roll.toFixed(3)} pitch:${person.angle.pitch.toFixed(3)} yaw:${person.angle.yaw.toFixed(3)}`, person.detection.box.x, person.detection.box.y - 6);
// draw face points for each face // draw face points for each face
ctx.globalAlpha = 0.8; ctx.globalAlpha = 0.8;
ctx.fillStyle = 'lightblue'; ctx.fillStyle = 'lightblue';
@ -66,6 +61,7 @@ function drawFaces(canvas, data, fps) {
for (let i = 0; i < person.landmarks.positions.length; i++) { for (let i = 0; i < person.landmarks.positions.length; i++) {
ctx.beginPath(); ctx.beginPath();
ctx.arc(person.landmarks.positions[i].x, person.landmarks.positions[i].y, pointSize, 0, 2 * Math.PI); ctx.arc(person.landmarks.positions[i].x, person.landmarks.positions[i].y, pointSize, 0, 2 * Math.PI);
// ctx.fillText(`${i}`, person.landmarks.positions[i].x + 4, person.landmarks.positions[i].y + 4);
ctx.fill(); ctx.fill();
} }
} }
@ -99,6 +95,7 @@ async function setupCamera() {
const canvas = document.getElementById('canvas'); const canvas = document.getElementById('canvas');
if (!video || !canvas) return null; if (!video || !canvas) return null;
let msg = '';
log('Setting up camera'); log('Setting up camera');
// setup webcam. note that navigator.mediaDevices requires that page is accessed via https // setup webcam. note that navigator.mediaDevices requires that page is accessed via https
if (!navigator.mediaDevices) { if (!navigator.mediaDevices) {
@ -106,19 +103,23 @@ async function setupCamera() {
return null; return null;
} }
let stream; let stream;
const constraints = { audio: false, video: { facingMode: 'user', resizeMode: 'crop-and-scale' } }; const constraints = {
audio: false,
video: { facingMode: 'user', resizeMode: 'crop-and-scale' },
};
if (window.innerWidth > window.innerHeight) constraints.video.width = { ideal: window.innerWidth }; if (window.innerWidth > window.innerHeight) constraints.video.width = { ideal: window.innerWidth };
else constraints.video.height = { ideal: window.innerHeight }; else constraints.video.height = { ideal: window.innerHeight };
try { try {
stream = await navigator.mediaDevices.getUserMedia(constraints); stream = await navigator.mediaDevices.getUserMedia(constraints);
} catch (err) { } catch (err) {
if (err.name === 'PermissionDeniedError' || err.name === 'NotAllowedError') log(`Camera Error: camera permission denied: ${err.message || err}`); if (err.name === 'PermissionDeniedError' || err.name === 'NotAllowedError') msg = 'camera permission denied';
if (err.name === 'SourceUnavailableError') log(`Camera Error: camera not available: ${err.message || err}`); else if (err.name === 'SourceUnavailableError') msg = 'camera not available';
log(`Camera Error: ${msg}: ${err.message || err}`);
return null; return null;
} }
if (stream) { // @ts-ignore
video.srcObject = stream; if (stream) video.srcObject = stream;
} else { else {
log('Camera Error: stream empty'); log('Camera Error: stream empty');
return null; return null;
} }
@ -127,23 +128,31 @@ async function setupCamera() {
if (settings.deviceId) delete settings.deviceId; if (settings.deviceId) delete settings.deviceId;
if (settings.groupId) delete settings.groupId; if (settings.groupId) delete settings.groupId;
if (settings.aspectRatio) settings.aspectRatio = Math.trunc(100 * settings.aspectRatio) / 100; if (settings.aspectRatio) settings.aspectRatio = Math.trunc(100 * settings.aspectRatio) / 100;
log(`Camera active: ${track.label}`); log(`Camera active: ${track.label}`); // ${str(constraints)}
log(`Camera settings: ${str(settings)}`); log(`Camera settings: ${str(settings)}`);
canvas.addEventListener('click', () => { canvas.addEventListener('click', () => {
// @ts-ignore
if (video && video.readyState >= 2) { if (video && video.readyState >= 2) {
// @ts-ignore
if (video.paused) { if (video.paused) {
// @ts-ignore
video.play(); video.play();
detectVideo(video, canvas); detectVideo(video, canvas);
} else { } else {
// @ts-ignore
video.pause(); video.pause();
} }
} }
// @ts-ignore
log(`Camera state: ${video.paused ? 'paused' : 'playing'}`); log(`Camera state: ${video.paused ? 'paused' : 'playing'}`);
}); });
return new Promise((resolve) => { return new Promise((resolve) => {
video.onloadeddata = async () => { video.onloadeddata = async () => {
// @ts-ignore
canvas.width = video.videoWidth; canvas.width = video.videoWidth;
// @ts-ignore
canvas.height = video.videoHeight; canvas.height = video.videoHeight;
// @ts-ignore
video.play(); video.play();
detectVideo(video, canvas); detectVideo(video, canvas);
resolve(true); resolve(true);
@ -161,6 +170,7 @@ async function setupFaceAPI() {
await faceapi.nets.faceRecognitionNet.load(modelPath); await faceapi.nets.faceRecognitionNet.load(modelPath);
await faceapi.nets.faceExpressionNet.load(modelPath); await faceapi.nets.faceExpressionNet.load(modelPath);
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: minScore, maxResults }); optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: minScore, maxResults });
// check tf engine state // check tf engine state
log(`Models loaded: ${str(faceapi.tf.engine().state.numTensors)} tensors`); log(`Models loaded: ${str(faceapi.tf.engine().state.numTensors)} tensors`);
} }
@ -170,21 +180,19 @@ async function main() {
log('FaceAPI WebCam Test'); log('FaceAPI WebCam Test');
// if you want to use wasm backend location for wasm binaries must be specified // if you want to use wasm backend location for wasm binaries must be specified
// await faceapi.tf?.setWasmPaths(`https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${faceapi.tf.version_core}/dist/`); // await faceapi.tf.setWasmPaths('../node_modules/@tensorflow/tfjs-backend-wasm/dist/');
// await faceapi.tf?.setBackend('wasm'); // await faceapi.tf.setBackend('wasm');
// log(`WASM SIMD: ${await faceapi.tf?.env().getAsync('WASM_HAS_SIMD_SUPPORT')} Threads: ${await faceapi.tf?.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT') ? 'Multi' : 'Single'}`);
// default is webgl backend // default is webgl backend
await faceapi.tf.setBackend('webgl'); await faceapi.tf.setBackend('webgl');
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready(); await faceapi.tf.ready();
// tfjs optimizations
if (faceapi.tf?.env().flagRegistry.CANVAS2D_WILL_READ_FREQUENTLY) faceapi.tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
// check version // check version
log(`Version: FaceAPI ${str(faceapi?.version || '(not loaded)')} TensorFlow/JS ${str(faceapi.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi.tf?.getBackend() || '(not loaded)')}`); log(`Version: FaceAPI ${str(faceapi?.version.faceapi || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`);
// log(`Flags: ${JSON.stringify(faceapi?.tf?.ENV.flags || { tf: 'not loaded' })}`);
await setupFaceAPI(); await setupFaceAPI();
await setupCamera(); await setupCamera();

1
dist/face-api.d.ts vendored
View File

@ -1 +0,0 @@
export * from '../types/face-api';

View File

@ -1 +0,0 @@
export * from '../types/face-api';

File diff suppressed because one or more lines are too long

7
dist/face-api.esm-nobundle.js.map vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -1 +0,0 @@
export * from '../types/face-api';

63384
dist/face-api.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

63391
dist/face-api.js vendored

File diff suppressed because one or more lines are too long

7
dist/face-api.js.map vendored Normal file

File diff suppressed because one or more lines are too long

4708
dist/face-api.node-cpu.js vendored Normal file

File diff suppressed because it is too large Load Diff

7
dist/face-api.node-cpu.js.map vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -1 +0,0 @@
export * from '../types/face-api';

File diff suppressed because one or more lines are too long

7
dist/face-api.node-gpu.js.map vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -1 +0,0 @@
export * from '../types/face-api';

File diff suppressed because one or more lines are too long

View File

@ -1 +0,0 @@
export * from '../types/face-api';

4703
dist/face-api.node.js vendored

File diff suppressed because one or more lines are too long

7
dist/face-api.node.js.map vendored Normal file

File diff suppressed because one or more lines are too long

28
dist/tfjs.esm.d.ts vendored
View File

@ -1,28 +0,0 @@
/*
import '@tensorflow/tfjs-core';
import '@tensorflow/tfjs-core/dist/types';
import '@tensorflow/tfjs-core/dist/register_all_gradients';
import '@tensorflow/tfjs-core/dist/public/chained_ops/register_all_chained_ops';
import '@tensorflow/tfjs-data';
import '@tensorflow/tfjs-layers';
import '@tensorflow/tfjs-converter';
import '@tensorflow/tfjs-backend-cpu';
import '@tensorflow/tfjs-backend-webgl';
import '@tensorflow/tfjs-backend-wasm';
import '@tensorflow/tfjs-backend-webgpu';
*/
export declare const version: {
'tfjs-core': string;
'tfjs-backend-cpu': string;
'tfjs-backend-webgl': string;
'tfjs-data': string;
'tfjs-layers': string;
'tfjs-converter': string;
tfjs: string;
};
export { io, browser, image } from '@tensorflow/tfjs-core';
export { tensor, tidy, softmax, unstack, relu, add, conv2d, cast, zeros, concat, avgPool, stack, fill, transpose, tensor1d, tensor2d, tensor3d, tensor4d, maxPool, matMul, mul, sub, scalar } from '@tensorflow/tfjs-core';
export { div, pad, slice, reshape, slice3d, expandDims, depthwiseConv2d, separableConv2d, sigmoid, exp, tile, batchNorm, clipByValue } from '@tensorflow/tfjs-core';
export { ENV, Variable, Tensor, TensorLike, Rank, Tensor1D, Tensor2D, Tensor3D, Tensor4D, Tensor5D, NamedTensorMap } from '@tensorflow/tfjs-core';

61520
dist/tfjs.esm.js vendored

File diff suppressed because one or more lines are too long

7
dist/tfjs.esm.js.map vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -1,9 +0,0 @@
export declare const version: {
'tfjs-core': string;
'tfjs-backend-cpu': string;
'tfjs-backend-webgl': string;
'tfjs-data': string;
'tfjs-layers': string;
'tfjs-converter': string;
tfjs: string;
};

View File

@ -1,7 +0,0 @@
/*
Face-API
homepage: <https://github.com/vladmandic/face-api>
author: <https://github.com/vladmandic>'
*/
var e="4.22.0";var s="4.22.0";var t="4.22.0";var n="4.22.0";var i="4.22.0";var w={tfjs:e,"tfjs-core":e,"tfjs-converter":s,"tfjs-backend-cpu":t,"tfjs-backend-webgl":n,"tfjs-backend-wasm":i};export{w as version};

View File

@ -1,12 +1,12 @@
{ {
"name": "@vladmandic/face-api", "name": "@vladmandic/face-api",
"version": "1.7.15", "version": "1.3.1",
"description": "FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS", "description": "FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS",
"sideEffects": false, "sideEffects": false,
"main": "dist/face-api.node.js", "main": "dist/face-api.node.js",
"module": "dist/face-api.esm.js", "module": "dist/face-api.esm.js",
"browser": "dist/face-api.esm.js", "browser": "dist/face-api.esm.js",
"types": "types/face-api.d.ts", "types": "types/index.d.ts",
"author": "Vladimir Mandic <mandic00@live.com>", "author": "Vladimir Mandic <mandic00@live.com>",
"bugs": { "bugs": {
"url": "https://github.com/vladmandic/face-api/issues" "url": "https://github.com/vladmandic/face-api/issues"
@ -14,17 +14,17 @@
"homepage": "https://vladmandic.github.io/face-api/demo/webcam.html", "homepage": "https://vladmandic.github.io/face-api/demo/webcam.html",
"license": "MIT", "license": "MIT",
"engines": { "engines": {
"node": ">=14.0.0" "node": ">=12.0.0"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
"url": "git+https://github.com/vladmandic/face-api.git" "url": "git+https://github.com/vladmandic/face-api.git"
}, },
"scripts": { "scripts": {
"start": "node --no-warnings demo/node.js", "start": "node --trace-warnings demo/node.js",
"build": "node build.js", "dev": "node --trace-warnings server/serve.js",
"dev": "build --profile development", "build": "rimraf dist/* types/* typedoc/* && node server/build.js",
"lint": "eslint src/ demo/", "lint": "eslint src/**/* demo/*.js server/*.js",
"test": "node --trace-warnings test/test-node.js", "test": "node --trace-warnings test/test-node.js",
"scan": "npx auditjs@latest ossi --dev --quiet" "scan": "npx auditjs@latest ossi --dev --quiet"
}, },
@ -42,38 +42,31 @@
"tfjs" "tfjs"
], ],
"devDependencies": { "devDependencies": {
"@canvas/image": "^2.0.0", "@canvas/image": "^1.0.1",
"@microsoft/api-extractor": "^7.49.2", "@tensorflow/tfjs": "^3.7.0",
"@tensorflow/tfjs": "^4.22.0", "@tensorflow/tfjs-backend-wasm": "^3.7.0",
"@tensorflow/tfjs-backend-cpu": "^4.22.0", "@tensorflow/tfjs-node": "^3.7.0",
"@tensorflow/tfjs-backend-wasm": "^4.22.0", "@tensorflow/tfjs-node-gpu": "^3.7.0",
"@tensorflow/tfjs-backend-webgl": "^4.22.0", "@types/node": "^15.12.3",
"@tensorflow/tfjs-backend-webgpu": "4.22.0", "@typescript-eslint/eslint-plugin": "^4.27.0",
"@tensorflow/tfjs-converter": "^4.22.0", "@typescript-eslint/parser": "^4.27.0",
"@tensorflow/tfjs-core": "^4.22.0", "@vladmandic/pilogger": "^0.2.17",
"@tensorflow/tfjs-data": "^4.22.0", "canvas": "^2.8.0",
"@tensorflow/tfjs-layers": "^4.22.0", "chokidar": "^3.5.2",
"@tensorflow/tfjs-node": "^4.22.0", "dayjs": "^1.10.5",
"@tensorflow/tfjs-node-gpu": "^4.22.0", "esbuild": "^0.12.9",
"@types/node": "^22.13.1", "eslint": "^7.28.0",
"@types/offscreencanvas": "^2019.7.3", "eslint-config-airbnb-base": "^14.2.1",
"@typescript-eslint/eslint-plugin": "^8.5.0", "eslint-plugin-import": "^2.23.4",
"@typescript-eslint/parser": "^8.5.0", "eslint-plugin-json": "^3.0.0",
"@vladmandic/build": "^0.10.2",
"@vladmandic/pilogger": "^0.5.1",
"ajv": "^8.17.1",
"esbuild": "^0.24.2",
"eslint": "8.57.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-plugin-import": "^2.30.0",
"eslint-plugin-json": "^4.0.1",
"eslint-plugin-node": "^11.1.0", "eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^7.1.0", "eslint-plugin-promise": "^5.1.0",
"node-fetch": "^3.3.2", "node-fetch": "^2.6.1",
"rimraf": "^6.0.1", "rimraf": "^3.0.2",
"seedrandom": "^3.0.5", "seedrandom": "^3.0.5",
"tslib": "^2.8.1", "simple-git": "^2.40.0",
"typedoc": "^0.27.6", "tslib": "^2.3.0",
"typescript": "5.7.3" "typedoc": "^0.21.0",
"typescript": "4.3.4"
} }
} }

260
server/build.js Normal file
View File

@ -0,0 +1,260 @@
/* eslint-disable import/no-extraneous-dependencies */
/* eslint-disable node/no-unpublished-require */
const esbuild = require('esbuild');
const ts = require('typescript');
const log = require('@vladmandic/pilogger');
const TypeDoc = require('typedoc');
const { ESLint } = require('eslint');
const tfjs = require('@tensorflow/tfjs/package.json');
const changelog = require('./changelog');
// js banner prepended verbatim to every generated bundle (runtime output — keep content as-is)
const banner = { js: `
/*
  Face-API
  homepage: <https://github.com/vladmandic/face-api>
  author: <https://github.com/vladmandic>'
*/
` };

// lazily initialized singletons, reused across rebuilds when running in watch mode
let td = null; // TypeDoc.Application instance
let eslint = null; // ESLint instance

// tsc configuration used for typings generation: emit declaration files only, no js output
const tsconfig = {
  noEmitOnError: false,
  target: ts.ScriptTarget.ES2018,
  module: ts.ModuleKind.ES2020,
  // outFile: "dist/face-api.d.ts",
  outDir: 'types/',
  declaration: true,
  emitDeclarationOnly: true,
  emitDecoratorMetadata: true,
  experimentalDecorators: true,
  skipLibCheck: true,
  strictNullChecks: true,
  baseUrl: './',
  paths: {
    tslib: ['node_modules/tslib/tslib.d.ts'],
  },
};

// folders scanned by the linter
const lintLocations = ['server/', 'demo/', 'src/', 'test/'];

// esbuild options shared by every target below
const common = {
  banner,
  minifyWhitespace: false,
  minifyIdentifiers: false,
  minifySyntax: false,
  bundle: true,
  sourcemap: true,
  metafile: true, // required so getStats() can report bundle composition
  logLevel: 'error',
  target: 'es2018',
  // tsconfig: './tsconfig.json',
};

// build matrix: each group pairs a tfjs bundle with a face-api bundle for one platform flavor
const targets = {
  node: {
    tfjs: {
      platform: 'node',
      format: 'cjs',
      entryPoints: ['src/tfjs/tf-node.ts'],
      outfile: 'dist/tfjs.esm.js',
      external: ['@tensorflow'],
    },
    node: {
      platform: 'node',
      format: 'cjs',
      entryPoints: ['src/index.ts'],
      outfile: 'dist/face-api.node.js',
      external: ['@tensorflow'],
    },
  },
  nodeGPU: {
    tfjs: {
      platform: 'node',
      format: 'cjs',
      entryPoints: ['src/tfjs/tf-node-gpu.ts'],
      outfile: 'dist/tfjs.esm.js',
      external: ['@tensorflow'],
    },
    node: {
      platform: 'node',
      format: 'cjs',
      entryPoints: ['src/index.ts'],
      outfile: 'dist/face-api.node-gpu.js',
      external: ['@tensorflow'],
    },
  },
  nodeCPU: {
    tfjs: {
      platform: 'node',
      format: 'cjs',
      entryPoints: ['src/tfjs/tf-node-cpu.ts'],
      outfile: 'dist/tfjs.esm.js',
      external: ['@tensorflow'],
    },
    node: {
      platform: 'node',
      format: 'cjs',
      entryPoints: ['src/index.ts'],
      outfile: 'dist/face-api.node-cpu.js',
      external: ['@tensorflow'],
    },
  },
  browserNoBundle: {
    tfjs: {
      platform: 'browser',
      format: 'esm',
      entryPoints: ['src/tfjs/tf-browser.ts'],
      outfile: 'dist/tfjs.esm.js',
      external: ['fs', 'buffer', 'util', 'os', '@tensorflow'],
    },
    esm: {
      platform: 'browser',
      format: 'esm',
      entryPoints: ['src/index.ts'],
      outfile: 'dist/face-api.esm-nobundle.js',
      external: ['fs', 'buffer', 'util', 'os', '@tensorflow', 'tfjs.esm.js'],
    },
  },
  browserBundle: {
    tfjs: {
      platform: 'browser',
      format: 'esm',
      entryPoints: ['src/tfjs/tf-browser.ts'],
      outfile: 'dist/tfjs.esm.js',
      external: ['fs', 'buffer', 'util', 'os'],
    },
    iife: {
      platform: 'browser',
      format: 'iife',
      globalName: 'faceapi', // iife build exposes everything under window.faceapi
      entryPoints: ['src/index.ts'],
      outfile: 'dist/face-api.js',
      external: ['fs', 'buffer', 'util', 'os'],
    },
    esm: {
      platform: 'browser',
      format: 'esm',
      entryPoints: ['src/index.ts'],
      outfile: 'dist/face-api.esm.js',
      external: ['fs', 'buffer', 'util', 'os'],
    },
  },
};
// Summarize an esbuild result's metafile into simple counters:
// modules/moduleBytes (inputs under node_modules), imports/importBytes (own sources),
// outputBytes and outputFiles (emitted files, excluding sourcemaps).
// Returns an empty object when no metafile data is available.
async function getStats(json) {
  const stats = {};
  // guard json.metafile itself: esbuild omits it when `metafile: false` or on failed builds,
  // and the original `json && json.metafile.inputs` dereference threw a TypeError in that case
  if (json && json.metafile && json.metafile.inputs && json.metafile.outputs) {
    for (const [key, val] of Object.entries(json.metafile.inputs)) {
      if (key.startsWith('node_modules')) {
        stats.modules = (stats.modules || 0) + 1;
        stats.moduleBytes = (stats.moduleBytes || 0) + val.bytes;
      } else {
        stats.imports = (stats.imports || 0) + 1;
        stats.importBytes = (stats.importBytes || 0) + val.bytes;
      }
    }
    const files = [];
    for (const [key, val] of Object.entries(json.metafile.outputs)) {
      if (!key.endsWith('.map')) { // sourcemaps are excluded from the size report
        files.push(key);
        stats.outputBytes = (stats.outputBytes || 0) + val.bytes;
      }
    }
    stats.outputFiles = files.join(', ');
  }
  return stats;
}
// Generate typings (.d.ts) for the given entry points using the TypeScript compiler API,
// logging any pre-emit or emit diagnostics that are not explicitly filtered out.
function typings(fileNames, options) {
  log.info('Compile typings:', fileNames);
  const program = ts.createProgram(fileNames, options);
  const emit = program.emit();
  const diag = ts
    .getPreEmitDiagnostics(program)
    .concat(emit.diagnostics);
  for (const info of diag) {
    // messageText may be a nested DiagnosticMessageChain; the previous one-level unwrap
    // (`info.messageText.messageText || info.messageText`) could leave a non-string and
    // crash on msg.includes() — flattenDiagnosticMessageText handles arbitrary nesting
    const msg = ts.flattenDiagnosticMessageText(info.messageText, '\n');
    if (msg.includes('package.json')) continue; // noise from package.json resolution
    if (msg.includes('Expected 0 arguments, but got 1')) continue; // known benign mismatch
    if (info.file) {
      const pos = info.file.getLineAndCharacterOfPosition(info.start || 0);
      log.error(`TSC: ${info.file.fileName} [${pos.line + 1},${pos.character + 1}]:`, msg);
    } else {
      log.error('TSC:', msg);
    }
  }
}
// Run ESLint over the configured locations and log a summary;
// detailed findings are printed only when there is at least one error or warning.
async function lint() {
  log.info('Running Linter:', lintLocations);
  if (!eslint) eslint = new ESLint(); // create the linter instance once and reuse it
  const results = await eslint.lintFiles(lintLocations);
  let errors = 0;
  let warnings = 0;
  for (const result of results) {
    errors += result.errorCount;
    warnings += result.warningCount;
  }
  log.info('Linter complete: files:', results.length, 'errors:', errors, 'warnings:', warnings);
  if (errors > 0 || warnings > 0) {
    const formatter = await eslint.loadFormatter('stylish');
    log.warn(formatter.format(results));
  }
}
// Generate TypeDoc documentation into ./typedoc for the given entry points;
// the TypeDoc application is bootstrapped once and reused on later calls.
async function typedoc(entryPoint) {
  log.info('Generate TypeDocs:', entryPoint);
  if (td === null) {
    td = new TypeDoc.Application();
    td.options.addReader(new TypeDoc.TSConfigReader());
    td.bootstrap({ entryPoints: entryPoint });
  }
  const project = td.convert();
  if (!project) return; // conversion failed; nothing to render
  const result = await td.generateDocs(project, 'typedoc');
  if (result) log.warn('TypeDoc:', result);
}
// Build every target group/type with esbuild; also invoked on file change in watch mode.
// f: file or tag that triggered the build; msg: event description; dev: skip prod-only steps.
async function build(f, msg, dev = false) {
  log.info('Build: file', msg, f, 'target:', common.target);
  try {
    // walk the full build matrix: every group, every bundle type within it
    for (const [groupName, group] of Object.entries(targets)) {
      for (const [typeName, targetOptions] of Object.entries(group)) {
        // if triggered from watch mode, rebuild only browser bundle
        // if ((require.main !== module) && (groupName !== 'browserBundle')) continue;
        // @ts-ignore
        const result = await esbuild.build({ ...common, ...targetOptions });
        const stats = await getStats(result);
        log.state(`Build for: ${groupName} type: ${typeName}:`, stats);
      }
    }
  } catch (err) {
    // report build failures with full detail, then bail out when run as a cli tool
    log.error('Build error', JSON.stringify(err.errors || err, null, 2));
    if (require.main === module) process.exit(1);
  }
  if (!dev) {
    // production-only steps, skipped for dev/watch builds
    await lint(); // run linter
    await typings(targets.browserBundle.esm.entryPoints, tsconfig); // generate typings
    await changelog.update('../CHANGELOG.md'); // generate changelog
    await typedoc(targets.browserBundle.esm.entryPoints); // generate typedoc
  }
  if (require.main === module) process.exit(0);
}
// when executed directly: print toolchain versions and run a full production build;
// when require()d (e.g. by serve.js): expose build() for programmatic rebuilds
if (require.main === module) {
  log.header();
  log.info(`Toolchain: tfjs: ${tfjs.version} esbuild ${esbuild.version}; typescript ${ts.version}; typedoc: ${TypeDoc.Application.VERSION} eslint: ${ESLint.version}`);
  build('all', 'startup');
} else {
  exports.build = build;
}

58
server/changelog.js Normal file
View File

@ -0,0 +1,58 @@
const fs = require('fs');
const path = require('path');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const dayjs = require('dayjs');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const simpleGit = require('simple-git/promise');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const logger = require('@vladmandic/pilogger');
const app = require('../package.json');
const git = simpleGit();

// markdown preamble for the generated changelog, filled from package.json;
// commit entries are appended to this string by update()
// (template content is runtime output — left byte-for-byte as authored)
let text = `# ${app.name}
Version: **${app.version}**
Description: **${app.description}**
Author: **${app.author}**
License: **${app.license}** </LICENSE>
Repository: **<${app.repository.url}>**
## Changelog
`;
// Rebuild the changelog from git history and write it to file f (relative to this script).
// Commits tagged or whose message starts with a version number become headings;
// other non-trivial, non-duplicate messages become bullet entries under the current heading.
async function update(f) {
  const gitLog = await git.log();
  // @ts-ignore
  const log = gitLog.all.sort((a, b) => (new Date(b.date).getTime() - new Date(a.date).getTime()));
  let previous = '';
  const headings = [];
  for (const l of log) {
    const msg = l.message.toLowerCase();
    // fixed version regexes: the original /[0-99].[0-99].[0-99]/ used a redundant char class
    // and unescaped dots, so arbitrary strings like "1a2b3" matched as versions
    if ((l.refs !== '') || msg.match(/^\d+\.\d+\.\d+/)) {
      const dt = dayjs(l.date).format('YYYY/MM/DD');
      // prefer the message itself when it carries a version, otherwise use the git ref
      let ver = msg.match(/\d+\.\d+\.\d+/) ? msg : l.refs;
      ver = ver.replace('tag: v', '').replace('tag: ', 'release: ').split(',')[0];
      const heading = `\n### **${ver}** ${dt} ${l.author_email}\n\n`;
      // emit each heading only once and skip unparsed tag refs
      if (!headings.includes(heading) && !ver.startsWith('tag')) {
        headings.push(heading);
        text += heading;
      }
    } else if ((msg.length > 2) && !msg.startsWith('update') && (previous !== msg)) {
      previous = msg; // de-duplicate consecutive identical messages
      text += `- ${msg}\n`;
    }
  }
  const name = path.join(__dirname, f);
  fs.writeFileSync(name, text);
  logger.info('Update Change log:', [name]);
}
// run directly to regenerate CHANGELOG.md; require() to call update() programmatically
if (require.main === module) {
  update('../CHANGELOG.md');
} else {
  exports.update = update;
}

31
server/https.crt Normal file
View File

@ -0,0 +1,31 @@
-----BEGIN CERTIFICATE-----
MIIFazCCA1OgAwIBAgIUKQKodDBJnuweJs5IcTyL4NIp3vgwDQYJKoZIhvcNAQEL
BQAwRTELMAkGA1UEBhMCVVMxEDAOBgNVBAgMB0Zsb3JpZGExDjAMBgNVBAcMBU1p
YW1pMRQwEgYDVQQKDAtAdmxhZG1hbmRpYzAeFw0yMDExMDcxNTE3NDNaFw0yMTEx
MDcxNTE3NDNaMEUxCzAJBgNVBAYTAlVTMRAwDgYDVQQIDAdGbG9yaWRhMQ4wDAYD
VQQHDAVNaWFtaTEUMBIGA1UECgwLQHZsYWRtYW5kaWMwggIiMA0GCSqGSIb3DQEB
AQUAA4ICDwAwggIKAoICAQDSC88PF8NyLkagK5mAZ/d739SOU16l2Cx3zE35zZQh
O29+1L4L+oMksLYipo+FMgtGO+MSzFsvGgKCs2sDSdfyoNSTZ3QaN4BAZ0sbq+wL
cke7yRBTM/XIGOQfhqq8yC2q8/zXwUbZg0UsCAxDGNwUr0Qlm829laIU/UN1KcYS
57Nebl1z05wMEvYmyl4JBAl9ozne7KS9DyW7jbrAXE8TaEy3+pY66kx5GG6v2+up
ScITGm4YPmPPlpOF1UjQloosgxdVa+fVp8aNCa/rf0JNO0Uhb3OKOZ+4kYmpfPn/
trwoKWAa6CV1uAJ+3zDkLMq1JNlrV4OMp1QvX0wzA47a/n466JMN9SFb0Ng5wf19
VOtT5Zu7chDStBudVjxlMDfUixvhvn4sjbaLNYR1fyWPoNXwr0KX2lpTP1QOzp9/
Sd0iiJ8RPfXn8Xo26MStu4I52CZjS7yEMgJGCLH/mgPuSbrHHYYrrrCPJgmQOZG2
TNMI+EqOwQvHh2ghdv7t7EEk4IslBk0QzufMXQ2WFXQ20nvj74mrmmiMuBcmonpR
0egA5/M18ZPLQxYu0Q86NUr4XHtAG1fq+n8pseQ7Avy6Gk6HRiezCbB7TJ9rnNeu
jie1TDajC6W7rx0VF7hcxkIrDgNgnYcjXUV2hMx1lo4fIoWkL3nJJVEthMVIcJOX
EwIDAQABo1MwUTAdBgNVHQ4EFgQUHawIRAo1bW8Xy7l4oKfM+ESjhs0wHwYDVR0j
BBgwFoAUHawIRAo1bW8Xy7l4oKfM+ESjhs0wDwYDVR0TAQH/BAUwAwEB/zANBgkq
hkiG9w0BAQsFAAOCAgEAozQJk5Ahx7rDn/aMXLdZFxR81VfkmHDm7NhlJsdVKUx5
o/iegXnvwc1PoeKsz2S504QiuL8l7jqZoU2WPIm7Vlr+oxBgiKqjo1EqBsUgNCZ7
qxMD84TVp/KBGjKUh1TXhjJwGGfNNr+R/fJGw+36UeuY3fSckjaYTuNuVElp+DoZ
/pGyu1qpcybLfiR8mpQkCeU/iBq5gIjWddbVjlYoTKfqULZrpsAF2AeqELEgyshl
p3PNhW/54TJSn4mWK+39BibYHPkvx8orEuWKyjjRk82hEXi7J3hsGKX29qC3oO40
67DKDWmZdMCz+E1ERf10V0bSp6iJnnlwknHJloZUETV1NY/DdoSC6e8CN0+0cQqL
aJefJ483O3sXyN3v3+DaEFBLPFgRFGZB7eaBwR2xAv/KfjT5dSyi+wA4LZAxsQMC
Q7UYGNAfHLNHJo/bsj12+JDhJaFZ/KoBKzyMUuEXmvjxXNDMCfm+gVQFoLyXkGq3
491W/O7LjR6pkD+ce0qeTFMu3nfUubyfbONVDEfuH4GC1e+FAggCRaBnFsVzCzXj
jxOOLoQ9nwLk8v17mx0BSwX4iuqvXFntfJbzfcnzQfx/qqPFheIbGnmKw1lrRML8
87ZbN6t01+v2YyYe6Mc7p80s1R3jc8aVX8ca2KcYwsJAkg/xz0q5RJwsE1is5UY=
-----END CERTIFICATE-----

52
server/https.key Normal file
View File

@ -0,0 +1,52 @@
-----BEGIN PRIVATE KEY-----
MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQDSC88PF8NyLkag
K5mAZ/d739SOU16l2Cx3zE35zZQhO29+1L4L+oMksLYipo+FMgtGO+MSzFsvGgKC
s2sDSdfyoNSTZ3QaN4BAZ0sbq+wLcke7yRBTM/XIGOQfhqq8yC2q8/zXwUbZg0Us
CAxDGNwUr0Qlm829laIU/UN1KcYS57Nebl1z05wMEvYmyl4JBAl9ozne7KS9DyW7
jbrAXE8TaEy3+pY66kx5GG6v2+upScITGm4YPmPPlpOF1UjQloosgxdVa+fVp8aN
Ca/rf0JNO0Uhb3OKOZ+4kYmpfPn/trwoKWAa6CV1uAJ+3zDkLMq1JNlrV4OMp1Qv
X0wzA47a/n466JMN9SFb0Ng5wf19VOtT5Zu7chDStBudVjxlMDfUixvhvn4sjbaL
NYR1fyWPoNXwr0KX2lpTP1QOzp9/Sd0iiJ8RPfXn8Xo26MStu4I52CZjS7yEMgJG
CLH/mgPuSbrHHYYrrrCPJgmQOZG2TNMI+EqOwQvHh2ghdv7t7EEk4IslBk0QzufM
XQ2WFXQ20nvj74mrmmiMuBcmonpR0egA5/M18ZPLQxYu0Q86NUr4XHtAG1fq+n8p
seQ7Avy6Gk6HRiezCbB7TJ9rnNeujie1TDajC6W7rx0VF7hcxkIrDgNgnYcjXUV2
hMx1lo4fIoWkL3nJJVEthMVIcJOXEwIDAQABAoICAF45S+ZSW6uh1K7PQCnY+a0J
CJncDk5JPhFzhds0fGm39tknaCWJeEECQIIkw6cVfvc/sCpjn9fuTAgDolK0UnoV
6aZCN1P3Z8H8VDYSlm3AEyvLE1avrWbYu6TkzTyoc8wHbXn/yt+SQnpxFccXpMpm
oSRZ0x5jvHS79AHf/mnGpLEMw0FNQOgtrVxTVYGn3PYOPcyhzXi+Dcgn2QmnnxVu
qVOyxqehKTL9YdHjzsB/RN868P5RJocd3gmgVuyzS0KSf+oi4Ln4bFoiaVc0HDL3
DpjkHSl5lgu+xclRNfifKaK+hM0tLHi1VfFB//WrnjdKU3oSpQF4oowprM4Jn5AP
jhRI54JWZlWnvbiAOx7D49xFga3EnqjVH6So2gxi+q3Dv25luXGAnueaBPDpVC6c
nkJm2aCl7T3xlVpW8O5Fs+rsP8Xr9RTyEQJauM01uOi3N2zEeO8ERxTYEW5Sy2U7
OFKRXtLj7Jnejib/SxWGcIX4Wid5QFAygbXz4APfFN22QU0fqmhm4/c2OB/xM8qr
VVFx4xlG2wnuq5CZdZjmK3MTbmSM+pWW8mly/+++p694cf5oXGenYus/JWFNwxj/
fPyA7zQmaTOidu6clDHzkPCOE7TBv9TkQ7lL6ClgE7B39JR65ZQtjCYqRsADKsGI
dFMg+HDmGbVEfWg2V0GBAoIBAQDupImrJ0JXHA/0SEC2Tbz7pE60fRwmBFdhvk4Z
rzZiaOl+M2HXQU6b5DYhKcgdiFah5IuAnsRPo6X5Ug+Q1DV3OFTuEGAkXgqZliNa
aXsJcc0++DYlXX3BrTb66gylVLQRs5tZzsXps5iXWclziDC2go8RKnCwxsxwbzVq
FP4hoBP4dp83WoLF4NznnGFGw3/KLlMivtRxDE5OegpxTuWGlA/bVtT187Ksuuz3
dFUayLfpg0ABS/E7wwAJjSUpPPEi3J/G255H3lZXgS1gWcAf3rGDQYlJKF8UHdja
yWQcAOF+b/bYEpa4lHw+UtKNNkPTiCV4Y7CNQd8a2Gcl7VFTAoIBAQDhUs9r1dhm
rUlNAunVZZZVZ91XhXeqVTa/9xUDEvDh91nB5c7CcuNXxwcX4oTsMF4Bc7CHlvOv
pybp+QLjK310VjxxkFYJT0TKWuYqLjtNkQ93sp8wF3gVCf8m8bMOX/gPfQzNZWKp
un+ZWnzXNU5d2A+63xbZmFzT0Zo6H/h9YEO5Xxw32HCKFzEhl5JD34muZTEXSpdD
p7LUUr5LvnoUqEzonhXx2qRnTLP87d1o0GlkVex9HeeeBgrvm57QYoJnABxw9UFM
/ocLeYsjkmqJQRBDWgiwQlos1pdZyX2Yj20b7Wm5Pxd4aM9gh5EZZMXeQHhbHlWz
UY1IPxfAkytBAoIBAHmYavFDisD58oMlAZwiViXeXaAHk30nfyK1pfPeXBaeoEKG
idb1VsmF6bLSKD4sBwBshExgGWT+3IYCMx43kpqRoGzA+UvugvYpExBxaJiyXMM2
E9jMH1S9HqOQ+CqR00KlwoVrH1rqANk1jbkJbtDAC4fSmSLp2Kd9crj/w1F80FAs
mQnKW5HZ9pUpEEPPP2DUY9XzaCnF/GxuML31VmxRKxc20kIUDzmF8VJQ+0Avf85C
6yz99gfeXzl+qq2teKyrv9nCc47pEhN6JZXPhV53yPk5PmuBX5jPcHxiW1kNddhH
0n3cUuHv/rJ+3vvG555z46vJF9+R7c0u8LfZiTMCggEBAMQd4a/IN0xXM1+2U3SL
sSew+XR+FMPK25aGJmHAkKz9L8CWlzmj6cCy2LevT2aMSqYU3eeGOZ//at1nAV5c
shsaHA30RQ5hUkyWhZLdHnzK752NeQTQyJH3W3+4C9NNMIm6m/QCdLeqPflqSxK9
sPH5ZueN2UOXW+R5oTVKMmxd51RnNhZdasamnPrSBFrTK/EA3pOZNsOKKRqo0jz3
Eyb7vcUSI6OYXFQU7OwO1RGvpKvSJb5Y0wo11DrtRnO16i5gaGDg9u9e8ofISJSz
kcrZOKCGst1HQ1mXhbB+sbSh0aPnJog4I+OHxkgMdvyVO6vQjXExnAIxzzi8wZ25
+oECggEBAIT6q/sn8xFt5Jwc/0Z7YUjd415Nknam09tnbB+UPRR6lt6JFoILx8by
5Y1sN30HWDv27v9G32oZhUDii3Rt3PkbYLqlHy7XBMEXA9WIUo+3Be7mtdL8Wfrj
0zn0b7Hks9a9KsElG1dXUopwjMRL3M22UamaN7e/gl5jz2I7pyc5oaqz9GRDV5yG
slb6gGZ5naMycJD3p8vutXbmgKRr9beRp55UICAbEMdr5p3ks8bfR33Z6t+a97u1
IxI5x5Lb0fdfvL8JK3nRWn7Uzbmm5Ni/OaODNKP+fIm9m2yDAs8LM8RGpPtk6i0d
qIRta3H9KNw2Mhpkm77TtUSV/W5aOmY=
-----END PRIVATE KEY-----

162
server/serve.js Normal file
View File

@ -0,0 +1,162 @@
/*
micro http2 server with file monitoring and automatic app rebuild
- can process concurrent http requests
- monitors specified filed and folders for changes
- triggers library and application rebuild
- any build errors are immediately displayed and can be corrected without need for restart
- passthrough data compression
*/
const fs = require('fs');
const zlib = require('zlib');
const http = require('http');
const http2 = require('http2');
const path = require('path');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const chokidar = require('chokidar');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const log = require('@vladmandic/pilogger');
const build = require('./build.js');
// app configuration
// you can provide your server key and certificate or use provided self-signed ones
// self-signed certificate generated using:
// openssl req -x509 -newkey rsa:4096 -nodes -keyout https.key -out https.crt -days 365 -subj "/C=US/ST=Florida/L=Miami/O=@vladmandic"
// client app does not work without secure server since browsers enforce https for webcam access
const options = {
  key: fs.readFileSync('server/https.key'), // tls private key
  cert: fs.readFileSync('server/https.crt'), // tls certificate
  root: '..', // document root, relative to this script's directory
  default: 'demo/index.html', // document served when a directory is requested
  httpPort: 8000, // plain http listener; set to 0 to disable
  httpsPort: 8001, // http2-over-tls listener; set to 0 to disable
  insecureHTTPParser: false,
  minElapsed: 2, // ms; file events closer together than this are merged into one rebuild
  monitor: ['package.json', 'demo', 'src'], // files and folders watched for changes
};
// just some predefined mime types, keyed by lowercase file extension
const mime = {
  '.html': 'text/html; charset=utf-8',
  '.js': 'text/javascript; charset=utf-8',
  '.css': 'text/css; charset=utf-8',
  '.json': 'application/json; charset=utf-8',
  '.png': 'image/png',
  '.jpg': 'image/jpeg',
  '.gif': 'image/gif',
  '.ico': 'image/x-icon',
  '.svg': 'image/svg+xml',
  '.wav': 'audio/wav',
  '.mp4': 'video/mp4',
  '.woff': 'font/woff',
  '.woff2': 'font/woff2',
  '.ttf': 'font/ttf',
  '.wasm': 'application/wasm',
};
// checks for multiple events triggering within minElapsed and merge get into single event
let last = Date.now(); // timestamp of the most recently seen file event
// Debounced rebuild trigger: events arriving within options.minElapsed ms of the
// previous one are merged (logged but not rebuilt); evt is the file, msg the event type.
async function buildAll(evt, msg) {
  const now = Date.now();
  const elapsed = now - last;
  last = now;
  if (elapsed > options.minElapsed) build.build(evt, msg, true);
  else log.state('Build: merge event file', msg, evt);
}
// Start the filesystem monitor over options.monitor and route every
// add/change/delete event into the debounced rebuild handler.
async function watch() {
  const settings = {
    persistent: true,
    ignorePermissionErrors: false,
    alwaysStat: false,
    ignoreInitial: true, // don't fire for files that already exist at startup
    followSymlinks: true,
    usePolling: false,
    useFsEvents: false,
    atomic: true,
  };
  const watcher = chokidar.watch(options.monitor, settings);
  // one handler per event type, all funneled into buildAll
  watcher.on('add', (evt) => buildAll(evt, 'add'));
  watcher.on('change', (evt) => buildAll(evt, 'modify'));
  watcher.on('unlink', (evt) => buildAll(evt, 'remove'));
  watcher.on('error', (err) => log.error(`Client watcher error: ${err}`));
  watcher.on('ready', () => log.state('Monitoring:', options.monitor));
}
// Resolve a requested path to a servable file.
// Resolves to { ok, file, stat } for a regular file, falls back to options.default
// inside directories, and resolves to null when nothing exists at the path.
function handle(url) {
  return new Promise((resolve) => {
    let obj = { ok: false };
    obj.file = url;
    if (!fs.existsSync(obj.file)) {
      resolve(null);
      // bug fix: the original fell through after resolve(null), so fs.statSync below
      // threw on the missing path and every 404 became a rejected promise
      return;
    }
    obj.stat = fs.statSync(obj.file);
    if (obj.stat.isFile()) obj.ok = true;
    if (!obj.ok && obj.stat.isDirectory()) {
      // directory request: retry with the default document inside it
      obj.file = path.join(obj.file, options.default);
      // @ts-ignore — resolving with this nested promise is flattened by the outer promise
      obj = handle(obj.file);
    }
    resolve(obj);
  });
}
// process http requests: resolve the url to a file under the document root and stream
// it back, brotli-compressing when the client advertises support; 404 on unknown paths
async function httpRequest(req, res) {
  handle(path.join(__dirname, options.root, decodeURI(req.url)))
    .then((result) => {
      // get original ip of requestor, regardless if it's behind proxy or not
      // eslint-disable-next-line dot-notation
      const forwarded = (req.headers['forwarded'] || '').match(/for="\[(.*)\]:/);
      const ip = (Array.isArray(forwarded) ? forwarded[1] : null) || req.headers['x-forwarded-for'] || req.ip || req.socket.remoteAddress;
      if (!result || !result.ok) {
        // nothing servable at this path: reply 404 and log as a warning
        res.writeHead(404, { 'Content-Type': 'text/html' });
        res.end('Error 404: Not Found\n', 'utf-8');
        log.warn(`${req.method}/${req.httpVersion}`, res.statusCode, req.url, ip);
      } else {
        // content type from the extension map, generic binary fallback otherwise
        const ext = String(path.extname(result.file)).toLowerCase();
        const contentType = mime[ext] || 'application/octet-stream';
        const accept = req.headers['accept-encoding'] ? req.headers['accept-encoding'].includes('br') : false; // does target accept brotli compressed data
        res.writeHead(200, {
          // 'Content-Length': result.stat.size, // not using as it's misleading for compressed streams
          'Content-Language': 'en', 'Content-Type': contentType, 'Content-Encoding': accept ? 'br' : '', 'Last-Modified': result.stat.mtime, 'Cache-Control': 'no-cache', 'X-Content-Type-Options': 'nosniff',
        });
        const compress = zlib.createBrotliCompress({ params: { [zlib.constants.BROTLI_PARAM_QUALITY]: 5 } }); // instance of brotli compression with level 5
        const stream = fs.createReadStream(result.file);
        if (!accept) stream.pipe(res); // don't compress data
        else stream.pipe(compress).pipe(res); // compress data
        // alternative methods of sending data
        // 1. stream pipe, as used above
        // 2. read stream and send by chunk
        // const stream = fs.createReadStream(result.file);
        // stream.on('data', (chunk) => res.write(chunk));
        // stream.on('end', () => res.end());
        // 3. read entire file and send it as blob
        // const data = fs.readFileSync(result.file);
        // res.write(data);
        log.data(`${req.method}/${req.httpVersion}`, res.statusCode, contentType, result.stat.size, req.url, ip);
      }
      return null;
    })
    .catch((err) => log.error('handle error:', err));
}
// Application entry point: start the file monitor, bring up the plain-http and
// http2-over-tls listeners (each only when its port is configured), then run an
// initial dev build so bundles are fresh before the first request arrives.
async function main() {
  log.header();
  await watch();
  if (options.httpPort && options.httpPort > 0) {
    const httpServer = http.createServer(options, httpRequest);
    httpServer.on('listening', () => log.state('HTTP server listening:', options.httpPort));
    httpServer.listen(options.httpPort);
  }
  if (options.httpsPort && options.httpsPort > 0) {
    const http2Server = http2.createSecureServer(options, httpRequest);
    http2Server.on('listening', () => log.state('HTTP2 server listening:', options.httpsPort));
    http2Server.listen(options.httpsPort);
  }
  await build.build('all', 'startup', true);
}
main();

View File

@ -10,9 +10,9 @@ export abstract class NeuralNetwork<TNetParams> {
this._name = name; this._name = name;
} }
protected _params: TNetParams | undefined = undefined; protected _params: TNetParams | undefined = undefined
protected _paramMappings: ParamMapping[] = []; protected _paramMappings: ParamMapping[] = []
public _name: any; public _name: any;
@ -102,9 +102,8 @@ export abstract class NeuralNetwork<TNetParams> {
} }
const { readFile } = env.getEnv(); const { readFile } = env.getEnv();
const { manifestUri, modelBaseUri } = getModelUris(filePath, this.getDefaultModelName()); const { manifestUri, modelBaseUri } = getModelUris(filePath, this.getDefaultModelName());
const fetchWeightsFromDisk = (filePaths: string[]) => Promise.all(filePaths.map((fp) => readFile(fp).then((buf) => (typeof buf === 'string' ? Buffer.from(buf) : buf.buffer)))); const fetchWeightsFromDisk = (filePaths: string[]) => Promise.all(filePaths.map((fp) => readFile(fp).then((buf) => buf.buffer)));
// @ts-ignore async-vs-sync mismatch const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk);
const loadWeights = tf['io'].weightsLoaderFactory(fetchWeightsFromDisk);
const manifest = JSON.parse((await readFile(manifestUri)).toString()); const manifest = JSON.parse((await readFile(manifestUri)).toString());
const weightMap = await loadWeights(manifest, modelBaseUri); const weightMap = await loadWeights(manifest, modelBaseUri);
this.loadFromWeightMap(weightMap); this.loadFromWeightMap(weightMap);

View File

@ -1,10 +1,6 @@
export class PlatformBrowser { export class PlatformBrowser {
private textEncoder: TextEncoder; private textEncoder: TextEncoder;
constructor() {
this.textEncoder = new TextEncoder();
}
fetch(path: string, init?: any): Promise<Response> { fetch(path: string, init?: any): Promise<Response> {
return fetch(path, init); return fetch(path, init);
} }
@ -17,6 +13,9 @@ export class PlatformBrowser {
if (encoding !== 'utf-8' && encoding !== 'utf8') { if (encoding !== 'utf-8' && encoding !== 'utf8') {
throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`); throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`);
} }
if (this.textEncoder == null) {
this.textEncoder = new TextEncoder();
}
return this.textEncoder.encode(text); return this.textEncoder.encode(text);
} }

View File

@ -1,4 +1,5 @@
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm';
import { fullyConnectedLayer } from '../common/fullyConnectedLayer'; import { fullyConnectedLayer } from '../common/fullyConnectedLayer';
import { seperateWeightMaps } from '../faceProcessor/util'; import { seperateWeightMaps } from '../faceProcessor/util';
import { TinyXception } from '../xception/TinyXception'; import { TinyXception } from '../xception/TinyXception';
@ -9,7 +10,7 @@ import { NeuralNetwork } from '../NeuralNetwork';
import { NetInput, TNetInput, toNetInput } from '../dom/index'; import { NetInput, TNetInput, toNetInput } from '../dom/index';
export class AgeGenderNet extends NeuralNetwork<NetParams> { export class AgeGenderNet extends NeuralNetwork<NetParams> {
private _faceFeatureExtractor: TinyXception; private _faceFeatureExtractor: TinyXception
constructor(faceFeatureExtractor: TinyXception = new TinyXception(2)) { constructor(faceFeatureExtractor: TinyXception = new TinyXception(2)) {
super('AgeGenderNet'); super('AgeGenderNet');
@ -84,7 +85,7 @@ export class AgeGenderNet extends NeuralNetwork<NetParams> {
return 'age_gender_model'; return 'age_gender_model';
} }
public override dispose(throwOnRedispose = true) { public dispose(throwOnRedispose = true) {
this.faceFeatureExtractor.dispose(throwOnRedispose); this.faceFeatureExtractor.dispose(throwOnRedispose);
super.dispose(throwOnRedispose); super.dispose(throwOnRedispose);
} }

View File

@ -7,8 +7,10 @@ export interface IBoundingBox {
bottom: number bottom: number
} }
export class BoundingBox extends Box implements IBoundingBox { export class BoundingBox extends Box<BoundingBox> implements IBoundingBox {
constructor(left: number, top: number, right: number, bottom: number, allowNegativeDimensions = false) { constructor(left: number, top: number, right: number, bottom: number, allowNegativeDimensions = false) {
super({ left, top, right, bottom }, allowNegativeDimensions); super({
left, top, right, bottom,
}, allowNegativeDimensions);
} }
} }

View File

@ -19,13 +19,13 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
} }
} }
private _x: number; private _x: number
private _y: number; private _y: number
private _width: number; private _width: number
private _height: number; private _height: number
constructor(_box: IBoundingBox | IRect, allowNegativeDimensions = true) { constructor(_box: IBoundingBox | IRect, allowNegativeDimensions = true) {
const box = (_box || {}) as any; const box = (_box || {}) as any;
@ -128,7 +128,9 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
this.width + padX, this.width + padX,
this.height + padY, this.height + padY,
]; ];
return new Box({ x, y, width, height }); return new Box({
x, y, width, height,
});
} }
public clipAtImageBorders(imgWidth: number, imgHeight: number): Box<BoxType> { public clipAtImageBorders(imgWidth: number, imgHeight: number): Box<BoxType> {
@ -141,7 +143,9 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
const clippedWidth = Math.min(newWidth, imgWidth - clippedX); const clippedWidth = Math.min(newWidth, imgWidth - clippedX);
const clippedHeight = Math.min(newHeight, imgHeight - clippedY); const clippedHeight = Math.min(newHeight, imgHeight - clippedY);
return (new Box({ x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight })).floor(); return (new Box({
x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight,
})).floor();
} }
public shift(sx: number, sy: number): Box<BoxType> { public shift(sx: number, sy: number): Box<BoxType> {
@ -149,7 +153,9 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
const x = this.x + sx; const x = this.x + sx;
const y = this.y + sy; const y = this.y + sy;
return new Box({ x, y, width, height }); return new Box({
x, y, width, height,
});
} }
public padAtBorders(imageHeight: number, imageWidth: number) { public padAtBorders(imageHeight: number, imageWidth: number) {
@ -183,7 +189,9 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
y = 1; y = 1;
} }
return { dy, edy, dx, edx, y, ey, x, ex, w, h }; return {
dy, edy, dx, edx, y, ey, x, ex, w, h,
};
} }
public calibrate(region: Box) { public calibrate(region: Box) {

View File

@ -6,9 +6,9 @@ export interface IDimensions {
} }
export class Dimensions implements IDimensions { export class Dimensions implements IDimensions {
private _width: number; private _width: number
private _height: number; private _height: number
constructor(width: number, height: number) { constructor(width: number, height: number) {
if (!isValidNumber(width) || !isValidNumber(height)) { if (!isValidNumber(width) || !isValidNumber(height)) {

View File

@ -17,7 +17,7 @@ export class FaceDetection extends ObjectDetection implements IFaceDetecion {
super(score, score, '', relativeBox, imageDims); super(score, score, '', relativeBox, imageDims);
} }
public override forSize(width: number, height: number): FaceDetection { public forSize(width: number, height: number): FaceDetection {
const { score, relativeBox, imageDims } = super.forSize(width, height); const { score, relativeBox, imageDims } = super.forSize(width, height);
return new FaceDetection(score, relativeBox, imageDims); return new FaceDetection(score, relativeBox, imageDims);
} }

View File

@ -18,11 +18,11 @@ export interface IFaceLandmarks {
} }
export class FaceLandmarks implements IFaceLandmarks { export class FaceLandmarks implements IFaceLandmarks {
protected _shift: Point; protected _shift: Point
protected _positions: Point[]; protected _positions: Point[]
protected _imgDims: Dimensions; protected _imgDims: Dimensions
constructor( constructor(
relativeFaceLandmarkPositions: Point[], relativeFaceLandmarkPositions: Point[],

View File

@ -3,7 +3,7 @@ import { FaceLandmarks } from './FaceLandmarks';
import { Point } from './Point'; import { Point } from './Point';
export class FaceLandmarks5 extends FaceLandmarks { export class FaceLandmarks5 extends FaceLandmarks {
protected override getRefPointsForAlignment(): Point[] { protected getRefPointsForAlignment(): Point[] {
const pts = this.positions; const pts = this.positions;
return [ return [
pts[0], pts[0],

View File

@ -31,7 +31,7 @@ export class FaceLandmarks68 extends FaceLandmarks {
return this.positions.slice(48, 68); return this.positions.slice(48, 68);
} }
protected override getRefPointsForAlignment(): Point[] { protected getRefPointsForAlignment(): Point[] {
return [ return [
this.getLeftEye(), this.getLeftEye(),
this.getRightEye(), this.getRightEye(),

View File

@ -6,8 +6,9 @@ export interface IFaceMatch {
} }
export class FaceMatch implements IFaceMatch { export class FaceMatch implements IFaceMatch {
private _label: string; private _label: string
private _distance: number;
private _distance: number
constructor(label: string, distance: number) { constructor(label: string, distance: number) {
this._label = label; this._label = label;

View File

@ -3,15 +3,16 @@ import { IBoundingBox } from './BoundingBox';
import { Box } from './Box'; import { Box } from './Box';
import { IRect } from './Rect'; import { IRect } from './Rect';
export class LabeledBox extends Box { export class LabeledBox extends Box<LabeledBox> {
public static assertIsValidLabeledBox(box: any, callee: string) { public static assertIsValidLabeledBox(box: any, callee: string) {
Box.assertIsValidBox(box, callee); Box.assertIsValidBox(box, callee);
if (!isValidNumber(box.label)) { if (!isValidNumber(box.label)) {
throw new Error(`${callee} - expected property label (${box.label}) to be a number`); throw new Error(`${callee} - expected property label (${box.label}) to be a number`);
} }
} }
private _label: number; private _label: number
constructor(box: IBoundingBox | IRect | any, label: number) { constructor(box: IBoundingBox | IRect | any, label: number) {
super(box); super(box);

View File

@ -1,7 +1,7 @@
export class LabeledFaceDescriptors { export class LabeledFaceDescriptors {
private _label: string; private _label: string
private _descriptors: Float32Array[]; private _descriptors: Float32Array[]
constructor(label: string, descriptors: Float32Array[]) { constructor(label: string, descriptors: Float32Array[]) {
if (!(typeof label === 'string')) { if (!(typeof label === 'string')) {

View File

@ -3,15 +3,15 @@ import { Dimensions, IDimensions } from './Dimensions';
import { IRect, Rect } from './Rect'; import { IRect, Rect } from './Rect';
export class ObjectDetection { export class ObjectDetection {
private _score: number; private _score: number
private _classScore: number; private _classScore: number
private _className: string; private _className: string
private _box: Rect; private _box: Rect
private _imageDims: Dimensions; private _imageDims: Dimensions
constructor( constructor(
score: number, score: number,

View File

@ -4,9 +4,9 @@ export interface IPoint {
} }
export class Point implements IPoint { export class Point implements IPoint {
private _x: number; private _x: number
private _y: number; private _y: number
constructor(x: number, y: number) { constructor(x: number, y: number) {
this._x = x; this._x = x;

View File

@ -15,9 +15,9 @@ export class PredictedBox extends LabeledBox {
} }
} }
private _score: number; private _score: number
private _classScore: number; private _classScore: number
constructor(box: IBoundingBox | IRect | any, label: number, score: number, classScore: number) { constructor(box: IBoundingBox | IRect | any, label: number, score: number, classScore: number) {
super(box, label); super(box, label);

View File

@ -7,8 +7,10 @@ export interface IRect {
height: number height: number
} }
export class Rect extends Box implements IRect { export class Rect extends Box<Rect> implements IRect {
constructor(x: number, y: number, width: number, height: number, allowNegativeDimensions = false) { constructor(x: number, y: number, width: number, height: number, allowNegativeDimensions = false) {
super({ x, y, width, height }, allowNegativeDimensions); super({
x, y, width, height,
}, allowNegativeDimensions);
} }
} }

View File

@ -9,17 +9,17 @@ import { imageToSquare } from './imageToSquare';
import { TResolvedNetInput } from './types'; import { TResolvedNetInput } from './types';
export class NetInput { export class NetInput {
private _imageTensors: Array<tf.Tensor3D | tf.Tensor4D> = []; private _imageTensors: Array<tf.Tensor3D | tf.Tensor4D> = []
private _canvases: HTMLCanvasElement[] = []; private _canvases: HTMLCanvasElement[] = []
private _batchSize: number; private _batchSize: number
private _treatAsBatchInput = false; private _treatAsBatchInput = false
private _inputDimensions: number[][] = []; private _inputDimensions: number[][] = []
private _inputSize = 0; private _inputSize: number
constructor(inputs: Array<TResolvedNetInput>, treatAsBatchInput = false) { constructor(inputs: Array<TResolvedNetInput>, treatAsBatchInput = false) {
if (!Array.isArray(inputs)) { if (!Array.isArray(inputs)) {
@ -47,9 +47,8 @@ export class NetInput {
return; return;
} }
// @ts-ignore
const canvas = (input as any) instanceof env.getEnv().Canvas ? input : createCanvasFromMedia(input); const canvas = (input as any) instanceof env.getEnv().Canvas ? input : createCanvasFromMedia(input);
this._canvases[idx] = canvas as HTMLCanvasElement; this._canvases[idx] = canvas;
this._inputDimensions[idx] = [canvas.height, canvas.width, 3]; this._inputDimensions[idx] = [canvas.height, canvas.width, 3];
}); });
} }
@ -128,24 +127,23 @@ export class NetInput {
if (input instanceof tf.Tensor) { if (input instanceof tf.Tensor) {
let imgTensor = isTensor4D(input) ? input : tf.expandDims(input); let imgTensor = isTensor4D(input) ? input : tf.expandDims(input);
imgTensor = padToSquare(imgTensor as tf.Tensor4D, isCenterInputs); imgTensor = padToSquare(imgTensor, isCenterInputs);
if (imgTensor.shape[1] !== inputSize || imgTensor.shape[2] !== inputSize) { if (imgTensor.shape[1] !== inputSize || imgTensor.shape[2] !== inputSize) {
imgTensor = tf['image'].resizeBilinear(imgTensor as tf.Tensor4D, [inputSize, inputSize], false, false); imgTensor = tf.image.resizeBilinear(imgTensor, [inputSize, inputSize], false, false);
} }
return imgTensor.as3D(inputSize, inputSize, 3); return imgTensor.as3D(inputSize, inputSize, 3);
} }
if (input instanceof env.getEnv().Canvas) { if (input instanceof env.getEnv().Canvas) {
return tf['browser'].fromPixels(imageToSquare(input, inputSize, isCenterInputs)); return tf.browser.fromPixels(imageToSquare(input, inputSize, isCenterInputs));
} }
throw new Error(`toBatchTensor - at batchIdx ${batchIdx}, expected input to be instanceof tf.Tensor or instanceof HTMLCanvasElement, instead have ${input}`); throw new Error(`toBatchTensor - at batchIdx ${batchIdx}, expected input to be instanceof tf.Tensor or instanceof HTMLCanvasElement, instead have ${input}`);
}); });
const batchTensor = tf.stack(inputTensors.map((t) => tf.cast(t, 'float32'))).as4D(this.batchSize, inputSize, inputSize, 3); const batchTensor = tf.stack(inputTensors.map((t) => tf.cast(t, 'float32'))).as4D(this.batchSize, inputSize, inputSize, 3);
// const batchTensor = tf.stack(inputTensors.map((t) => tf.cast(t, 'float32'))) as tf.Tensor4D;
return batchTensor; return batchTensor;
}); });

View File

@ -4,10 +4,7 @@ import { isMediaLoaded } from './isMediaLoaded';
export function awaitMediaLoaded(media: HTMLImageElement | HTMLVideoElement | HTMLCanvasElement) { export function awaitMediaLoaded(media: HTMLImageElement | HTMLVideoElement | HTMLCanvasElement) {
// eslint-disable-next-line consistent-return // eslint-disable-next-line consistent-return
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
if (media instanceof env.getEnv().Canvas || isMediaLoaded(media)) { if (media instanceof env.getEnv().Canvas || isMediaLoaded(media)) return resolve(null);
resolve(null);
return;
}
function onError(e: Event) { function onError(e: Event) {
if (!e.currentTarget) return; if (!e.currentTarget) return;

View File

@ -25,11 +25,17 @@ export async function extractFaceTensors(imageTensor: tf.Tensor3D | tf.Tensor4D,
return tf.tidy(() => { return tf.tidy(() => {
const [imgHeight, imgWidth, numChannels] = imageTensor.shape.slice(isTensor4D(imageTensor) ? 1 : 0); const [imgHeight, imgWidth, numChannels] = imageTensor.shape.slice(isTensor4D(imageTensor) ? 1 : 0);
const boxes = detections.map((det) => (det instanceof FaceDetection ? det.forSize(imgWidth, imgHeight).box : det))
const boxes = detections
.map((det) => (det instanceof FaceDetection
? det.forSize(imgWidth, imgHeight).box
: det))
.map((box) => box.clipAtImageBorders(imgWidth, imgHeight)); .map((box) => box.clipAtImageBorders(imgWidth, imgHeight));
const faceTensors = boxes
.filter((box) => box.width > 0 && box.height > 0) const faceTensors = boxes.map(({
.map(({ x, y, width, height }) => tf.slice3d(imageTensor.as3D(imgHeight, imgWidth, numChannels), [y, x, 0], [height, width, numChannels])); x, y, width, height,
}) => tf.slice3d(imageTensor.as3D(imgHeight, imgWidth, numChannels), [y, x, 0], [height, width, numChannels]));
return faceTensors; return faceTensors;
}); });
} }

View File

@ -3,10 +3,21 @@ import { resolveInput } from './resolveInput';
export function getContext2dOrThrow(canvasArg: string | HTMLCanvasElement | CanvasRenderingContext2D): CanvasRenderingContext2D { export function getContext2dOrThrow(canvasArg: string | HTMLCanvasElement | CanvasRenderingContext2D): CanvasRenderingContext2D {
const { Canvas, CanvasRenderingContext2D } = env.getEnv(); const { Canvas, CanvasRenderingContext2D } = env.getEnv();
if (canvasArg instanceof CanvasRenderingContext2D) return canvasArg;
if (canvasArg instanceof CanvasRenderingContext2D) {
return canvasArg;
}
const canvas = resolveInput(canvasArg); const canvas = resolveInput(canvasArg);
if (!(canvas instanceof Canvas)) throw new Error('resolveContext2d - expected canvas to be of instance of Canvas');
const ctx = canvas.getContext('2d', { willReadFrequently: true }); if (!(canvas instanceof Canvas)) {
if (!ctx) throw new Error('resolveContext2d - canvas 2d context is null'); throw new Error('resolveContext2d - expected canvas to be of instance of Canvas');
}
const ctx = canvas.getContext('2d');
if (!ctx) {
throw new Error('resolveContext2d - canvas 2d context is null');
}
return ctx; return ctx;
} }

View File

@ -11,7 +11,7 @@ export async function imageTensorToCanvas(
const [height, width, numChannels] = imgTensor.shape.slice(isTensor4D(imgTensor) ? 1 : 0); const [height, width, numChannels] = imgTensor.shape.slice(isTensor4D(imgTensor) ? 1 : 0);
const imgTensor3D = tf.tidy(() => imgTensor.as3D(height, width, numChannels).toInt()); const imgTensor3D = tf.tidy(() => imgTensor.as3D(height, width, numChannels).toInt());
await tf['browser'].toPixels(imgTensor3D, targetCanvas); await tf.browser.toPixels(imgTensor3D, targetCanvas);
imgTensor3D.dispose(); imgTensor3D.dispose();

View File

@ -8,8 +8,7 @@ export async function loadWeightMap(
defaultModelName: string, defaultModelName: string,
): Promise<tf.NamedTensorMap> { ): Promise<tf.NamedTensorMap> {
const { manifestUri, modelBaseUri } = getModelUris(uri, defaultModelName); const { manifestUri, modelBaseUri } = getModelUris(uri, defaultModelName);
// @ts-ignore
const manifest = await fetchJson<tf.io.WeightsManifestConfig>(manifestUri); const manifest = await fetchJson<tf.io.WeightsManifestConfig>(manifestUri);
// if (manifest['weightsManifest']) manifest = manifest['weightsManifest']; // if (manifest['weightsManifest']) manifest = manifest['weightsManifest'];
return tf['io'].loadWeights(manifest, modelBaseUri); return tf.io.loadWeights(manifest, modelBaseUri);
} }

View File

@ -1,9 +1,11 @@
import type { Tensor3D, Tensor4D } from '../../dist/tfjs.esm'; import * as tf from '../../dist/tfjs.esm';
import { NetInput } from './NetInput'; import { NetInput } from './NetInput';
export type TMediaElement = HTMLImageElement | HTMLVideoElement | HTMLCanvasElement export type TMediaElement = HTMLImageElement | HTMLVideoElement | HTMLCanvasElement
export type TResolvedNetInput = TMediaElement | Tensor3D | Tensor4D export type TResolvedNetInput = TMediaElement | tf.Tensor3D | tf.Tensor4D
export type TNetInput = string | TResolvedNetInput | Array<string | TResolvedNetInput> | NetInput export type TNetInputArg = string | TResolvedNetInput
export type TNetInput = TNetInputArg | Array<TNetInputArg> | NetInput | tf.Tensor4D

View File

@ -11,13 +11,13 @@ export interface IDrawBoxOptions {
} }
export class DrawBoxOptions { export class DrawBoxOptions {
public boxColor: string; public boxColor: string
public lineWidth: number; public lineWidth: number
public drawLabelOptions: DrawTextFieldOptions; public drawLabelOptions: DrawTextFieldOptions
public label?: string; public label?: string
constructor(options: IDrawBoxOptions = {}) { constructor(options: IDrawBoxOptions = {}) {
const { const {
@ -36,9 +36,9 @@ export class DrawBoxOptions {
} }
export class DrawBox { export class DrawBox {
public box: Box; public box: Box
public options: DrawBoxOptions; public options: DrawBoxOptions
constructor( constructor(
box: IBoundingBox | IRect, box: IBoundingBox | IRect,

View File

@ -17,17 +17,17 @@ export interface IDrawFaceLandmarksOptions {
} }
export class DrawFaceLandmarksOptions { export class DrawFaceLandmarksOptions {
public drawLines: boolean; public drawLines: boolean
public drawPoints: boolean; public drawPoints: boolean
public lineWidth: number; public lineWidth: number
public pointSize: number; public pointSize: number
public lineColor: string; public lineColor: string
public pointColor: string; public pointColor: string
constructor(options: IDrawFaceLandmarksOptions = {}) { constructor(options: IDrawFaceLandmarksOptions = {}) {
const { const {
@ -43,9 +43,9 @@ export class DrawFaceLandmarksOptions {
} }
export class DrawFaceLandmarks { export class DrawFaceLandmarks {
public faceLandmarks: FaceLandmarks; public faceLandmarks: FaceLandmarks
public options: DrawFaceLandmarksOptions; public options: DrawFaceLandmarksOptions
constructor( constructor(
faceLandmarks: FaceLandmarks, faceLandmarks: FaceLandmarks,

View File

@ -25,17 +25,17 @@ export interface IDrawTextFieldOptions {
} }
export class DrawTextFieldOptions implements IDrawTextFieldOptions { export class DrawTextFieldOptions implements IDrawTextFieldOptions {
public anchorPosition: AnchorPosition; public anchorPosition: AnchorPosition
public backgroundColor: string; public backgroundColor: string
public fontColor: string; public fontColor: string
public fontSize: number; public fontSize: number
public fontStyle: string; public fontStyle: string
public padding: number; public padding: number
constructor(options: IDrawTextFieldOptions = {}) { constructor(options: IDrawTextFieldOptions = {}) {
const { const {
@ -51,11 +51,11 @@ export class DrawTextFieldOptions implements IDrawTextFieldOptions {
} }
export class DrawTextField { export class DrawTextField {
public text: string[]; public text: string[]
public anchor : IPoint; public anchor : IPoint
public options: DrawTextFieldOptions; public options: DrawTextFieldOptions
constructor( constructor(
text: string | string[] | DrawTextField, text: string | string[] | DrawTextField,

View File

@ -7,7 +7,12 @@ import { DrawTextField } from './DrawTextField';
export type DrawFaceExpressionsInput = FaceExpressions | WithFaceExpressions<{}> export type DrawFaceExpressionsInput = FaceExpressions | WithFaceExpressions<{}>
export function drawFaceExpressions(canvasArg: string | HTMLCanvasElement, faceExpressions: DrawFaceExpressionsInput | Array<DrawFaceExpressionsInput>, minConfidence = 0.1, textFieldAnchor?: IPoint) { export function drawFaceExpressions(
canvasArg: string | HTMLCanvasElement,
faceExpressions: DrawFaceExpressionsInput | Array<DrawFaceExpressionsInput>,
minConfidence = 0.1,
textFieldAnchor?: IPoint,
) {
const faceExpressionsArray = Array.isArray(faceExpressions) ? faceExpressions : [faceExpressions]; const faceExpressionsArray = Array.isArray(faceExpressions) ? faceExpressions : [faceExpressions];
faceExpressionsArray.forEach((e) => { faceExpressionsArray.forEach((e) => {

View File

@ -1,20 +1,26 @@
import { FileSystem } from './types'; import { FileSystem } from './types';
import { isNodejs } from './isNodejs';
export function createFileSystem(fs?: any): FileSystem { export function createFileSystem(fs?: any): FileSystem {
let requireFsError = ''; let requireFsError = '';
if (!fs && isNodejs()) {
if (!fs) {
try { try {
// eslint-disable-next-line global-require, @typescript-eslint/no-require-imports // eslint-disable-next-line global-require
fs = require('fs'); fs = require('fs');
} catch (err) { } catch (err) {
requireFsError = (err as any).toString(); requireFsError = err.toString();
} }
} }
const readFile = fs const readFile = fs
// eslint-disable-next-line no-undef ? (filePath: string) => new Promise<Buffer>((resolve, reject) => {
? (filePath: string) => new Promise<string | Buffer>((resolve, reject) => { fs.readFile(filePath, (err: NodeJS.ErrnoException | null, buffer: string | Buffer) => (err ? reject(err) : resolve(buffer))); }) fs.readFile(filePath, (err: any, buffer: Buffer) => (err ? reject(err) : resolve(buffer)));
: () => { throw new Error(`readFile - failed to require fs in nodejs environment with error: ${requireFsError}`); }; })
return { readFile }; : () => {
throw new Error(`readFile - failed to require fs in nodejs environment with error: ${requireFsError}`);
};
return {
readFile,
};
} }

View File

@ -3,9 +3,11 @@ import { createFileSystem } from './createFileSystem';
import { Environment } from './types'; import { Environment } from './types';
export function createNodejsEnv(): Environment { export function createNodejsEnv(): Environment {
const Canvas: (new () => HTMLCanvasElement) = (global as any)['Canvas'] || global.HTMLCanvasElement; // eslint-disable-next-line dot-notation
const Canvas = global['Canvas'] || global.HTMLCanvasElement;
const Image = global.Image || global.HTMLImageElement; const Image = global.Image || global.HTMLImageElement;
const Video: (new () => HTMLVideoElement) = (global as any)['Video'] || global.HTMLVideoElement; // eslint-disable-next-line dot-notation
const Video = global['Video'] || global.HTMLVideoElement;
const createCanvasElement = () => { const createCanvasElement = () => {
if (Canvas) return new Canvas(); if (Canvas) return new Canvas();

6
src/env/isNodejs.ts vendored
View File

@ -1,6 +1,6 @@
export function isNodejs(): boolean { export function isNodejs(): boolean {
return typeof global === 'object' return typeof global === 'object'
&& typeof process !== 'undefined' && typeof require === 'function'
&& process.versions != null && typeof module !== 'undefined'
&& process.versions.node != null; && typeof process !== 'undefined' && !!process.version;
} }

24
src/env/types.ts vendored
View File

@ -1,17 +1,17 @@
export type FileSystem = { export type FileSystem = {
// eslint-disable-next-line no-unused-vars // eslint-disable-next-line no-unused-vars
readFile: (filePath: string) => Promise<string | Buffer>; readFile: (filePath: string) => Promise<Buffer>
}; }
export type Environment = FileSystem & { export type Environment = FileSystem & {
Canvas: typeof HTMLCanvasElement; Canvas: typeof HTMLCanvasElement
CanvasRenderingContext2D: typeof CanvasRenderingContext2D; CanvasRenderingContext2D: typeof CanvasRenderingContext2D
Image: typeof HTMLImageElement; Image: typeof HTMLImageElement
ImageData: typeof ImageData; ImageData: typeof ImageData
Video: typeof HTMLVideoElement; Video: typeof HTMLVideoElement
createCanvasElement: () => HTMLCanvasElement; createCanvasElement: () => HTMLCanvasElement
createImageElement: () => HTMLImageElement; createImageElement: () => HTMLImageElement
createVideoElement: () => HTMLVideoElement; createVideoElement: () => HTMLVideoElement
// eslint-disable-next-line no-undef, no-unused-vars // eslint-disable-next-line no-undef, no-unused-vars
fetch: (url: string, init?: RequestInit) => Promise<Response>; fetch: (url: string, init?: RequestInit) => Promise<Response>
}; }

View File

@ -1,10 +1,12 @@
export function euclideanDistance(arr1: number[] | Float32Array, arr2: number[] | Float32Array) { export function euclideanDistance(arr1: number[] | Float32Array, arr2: number[] | Float32Array) {
if (arr1.length !== arr2.length) throw new Error('euclideanDistance: arr1.length !== arr2.length'); if (arr1.length !== arr2.length) throw new Error('euclideanDistance: arr1.length !== arr2.length');
const desc1 = Array.from(arr1); const desc1 = Array.from(arr1);
const desc2 = Array.from(arr2); const desc2 = Array.from(arr2);
return Math.sqrt( return Math.sqrt(
desc1 desc1
.map((val, i) => val - desc2[i]) .map((val, i) => val - desc2[i])
.reduce((res, diff) => res + (diff * diff), 0), .reduce((res, diff) => res + (diff ** 2), 0),
); );
} }

View File

@ -1,13 +1,19 @@
export const FACE_EXPRESSION_LABELS = ['neutral', 'happy', 'sad', 'angry', 'fearful', 'disgusted', 'surprised'] as const; export const FACE_EXPRESSION_LABELS = ['neutral', 'happy', 'sad', 'angry', 'fearful', 'disgusted', 'surprised'];
export class FaceExpressions { export class FaceExpressions {
public neutral = 0; public neutral: number
public happy = 0;
public sad = 0; public happy: number
public angry = 0;
public fearful = 0; public sad: number
public disgusted = 0;
public surprised = 0; public angry: number
public fearful: number
public disgusted: number
public surprised: number
constructor(probabilities: number[] | Float32Array) { constructor(probabilities: number[] | Float32Array) {
if (probabilities.length !== 7) { if (probabilities.length !== 7) {

View File

@ -13,7 +13,7 @@ export abstract class FaceProcessor<
TExtractorParams extends FaceFeatureExtractorParams | TinyFaceFeatureExtractorParams TExtractorParams extends FaceFeatureExtractorParams | TinyFaceFeatureExtractorParams
> >
extends NeuralNetwork<NetParams> { extends NeuralNetwork<NetParams> {
protected _faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>; protected _faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>
constructor(_name: string, faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>) { constructor(_name: string, faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>) {
super(_name); super(_name);
@ -24,7 +24,7 @@ export abstract class FaceProcessor<
return this._faceFeatureExtractor; return this._faceFeatureExtractor;
} }
protected abstract override getDefaultModelName(): string protected abstract getDefaultModelName(): string
protected abstract getClassifierChannelsIn(): number protected abstract getClassifierChannelsIn(): number
@ -45,7 +45,7 @@ export abstract class FaceProcessor<
}); });
} }
public override dispose(throwOnRedispose = true) { public dispose(throwOnRedispose = true) {
this.faceFeatureExtractor.dispose(throwOnRedispose); this.faceFeatureExtractor.dispose(throwOnRedispose);
super.dispose(throwOnRedispose); super.dispose(throwOnRedispose);
} }

View File

@ -51,7 +51,7 @@ export class FaceRecognitionNet extends NeuralNetwork<NetParams> {
const globalAvg = out.mean([1, 2]) as tf.Tensor2D; const globalAvg = out.mean([1, 2]) as tf.Tensor2D;
const fullyConnected = tf.matMul(globalAvg, params.fc); const fullyConnected = tf.matMul(globalAvg, params.fc);
return fullyConnected as tf.Tensor2D; return fullyConnected;
}); });
} }
@ -60,7 +60,6 @@ export class FaceRecognitionNet extends NeuralNetwork<NetParams> {
} }
public async computeFaceDescriptor(input: TNetInput): Promise<Float32Array|Float32Array[]> { public async computeFaceDescriptor(input: TNetInput): Promise<Float32Array|Float32Array[]> {
// @ts-ignore
if (input?.shape?.some((dim) => dim <= 0)) return new Float32Array(128); if (input?.shape?.some((dim) => dim <= 0)) return new Float32Array(128);
const netInput = await toNetInput(input); const netInput = await toNetInput(input);
const faceDescriptorTensors = tf.tidy(() => tf.unstack(this.forwardInput(netInput))); const faceDescriptorTensors = tf.tidy(() => tf.unstack(this.forwardInput(netInput)));

View File

@ -1,4 +1,3 @@
import { Point } from '../classes';
import { FaceDetection } from '../classes/FaceDetection'; import { FaceDetection } from '../classes/FaceDetection';
import { FaceLandmarks } from '../classes/FaceLandmarks'; import { FaceLandmarks } from '../classes/FaceLandmarks';
import { FaceLandmarks68 } from '../classes/FaceLandmarks68'; import { FaceLandmarks68 } from '../classes/FaceLandmarks68';
@ -6,106 +5,75 @@ import { isWithFaceDetection, WithFaceDetection } from './WithFaceDetection';
export type WithFaceLandmarks< export type WithFaceLandmarks<
TSource extends WithFaceDetection<{}>, TSource extends WithFaceDetection<{}>,
TFaceLandmarks extends FaceLandmarks = FaceLandmarks68 TFaceLandmarks extends FaceLandmarks = FaceLandmarks68 > = TSource & {
> = TSource & { landmarks: TFaceLandmarks,
landmarks: TFaceLandmarks; unshiftedLandmarks: TFaceLandmarks,
unshiftedLandmarks: TFaceLandmarks; alignedRect: FaceDetection,
alignedRect: FaceDetection; angle: { roll: number | undefined, pitch: number | undefined, yaw: number | undefined },
angle: {
roll: number | undefined;
pitch: number | undefined;
yaw: number | undefined;
};
};
export function isWithFaceLandmarks(
obj: any,
): obj is WithFaceLandmarks<WithFaceDetection<{}>, FaceLandmarks> {
return (
isWithFaceDetection(obj)
&& (obj as any)['landmarks'] instanceof FaceLandmarks
&& (obj as any)['unshiftedLandmarks'] instanceof FaceLandmarks
&& (obj as any)['alignedRect'] instanceof FaceDetection
);
} }
function calculateFaceAngle(mesh: FaceLandmarks) { export function isWithFaceLandmarks(obj: any): obj is WithFaceLandmarks<WithFaceDetection<{}>, FaceLandmarks> {
// Helper to convert radians to degrees return isWithFaceDetection(obj)
// eslint-disable-next-line dot-notation
&& obj['landmarks'] instanceof FaceLandmarks
// eslint-disable-next-line dot-notation
&& obj['unshiftedLandmarks'] instanceof FaceLandmarks
// eslint-disable-next-line dot-notation
&& obj['alignedRect'] instanceof FaceDetection;
}
function calculateFaceAngle(mesh) {
// returns the angle in the plane (in radians) between the positive x-axis and the ray from (0,0) to the point (x,y)
const radians = (a1, a2, b1, b2) => (Math.atan2(b2 - a2, b1 - a1) % Math.PI);
// convert radians to degrees
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
const degrees = (radians: number) => (radians * 180) / Math.PI; const degrees = (theta) => (theta * 180) / Math.PI;
const calcLengthBetweenTwoPoints = (a: Point, b: Point) => Math.sqrt((a.x - b.x) ** 2 + (a.y - b.y) ** 2);
const angle = { const angle = { roll: <number | undefined>undefined, pitch: <number | undefined>undefined, yaw: <number | undefined>undefined };
roll: <number | undefined>undefined,
pitch: <number | undefined>undefined,
yaw: <number | undefined>undefined,
};
const calcYaw = (leftPoint: Point, midPoint: Point, rightPoint: Point) => { if (!mesh || !mesh._positions || mesh._positions.length !== 68) return angle;
// Calc x-distance from left side of the face ("ear") to facial midpoint ("nose") const pt = mesh._positions;
const leftToMidpoint = Math.floor(leftPoint.x - midPoint.x);
// Calc x-distance from facial midpoint ("nose") to the right side of the face ("ear")
const rightToMidpoint = Math.floor(midPoint.x - rightPoint.x);
// Difference in distances coincidentally approximates to angles
return leftToMidpoint - rightToMidpoint;
};
const calcRoll = (lever: Point, pivot: Point) => { // values are in radians in range of -pi/2 to pi/2 which is -90 to +90 degrees
// When rolling, the head seems to pivot from the nose/lips/chin area. // value of 0 means center
// So, we'll choose any two points from the facial midline, where the first point should be the pivot, and the other "lever"
// Plan/Execution: get the hypotenuse & opposite sides of a 90deg triangle ==> Calculate angle in radians
const hypotenuse = Math.hypot(pivot.x - lever.x, pivot.y - lever.y);
const opposite = pivot.y - lever.y;
const angleInRadians = Math.asin(opposite / hypotenuse);
const angleInDegrees = degrees(angleInRadians);
const normalizeAngle = Math.floor(90 - angleInDegrees);
// If lever more to the left of the pivot, then we're tilting left
// "-" is negative direction. "+", or absence of a sign is positive direction
const tiltDirection = pivot.x - lever.x < 0 ? -1 : 1;
const result = normalizeAngle * tiltDirection;
return result;
};
const calcPitch = (leftPoint: Point, midPoint: Point, rightPoint: Point) => { // roll is face lean from left to right
// Theory: While pitching, the nose is the most salient point --> That's what we'll use to make a trianle. // comparing x,y of outside corners of leftEye and rightEye
// The "base" is between point that don't move when we pitch our head (i.e. an imaginary line running ear to ear through the nose). angle.roll = -radians(pt[36]._x, pt[36]._y, pt[45]._x, pt[45]._y);
// Executuin: Get the opposite & adjacent lengths of the triangle from the ear's perspective. Use it to get angle.
const base = calcLengthBetweenTwoPoints(leftPoint, rightPoint); // pitch is face turn from left right
// adjecent is base/2 technically. // comparing x distance of top of nose to left and right edge of face
const baseCoords = new Point((leftPoint.x + rightPoint.x) / 2, (leftPoint.y + rightPoint.y) / 2); // precision is lacking since coordinates are not precise enough
const midToBaseLength = calcLengthBetweenTwoPoints(midPoint, baseCoords); angle.pitch = radians(0, Math.abs(pt[0]._x - pt[30]._x) / pt[30]._x, Math.PI, Math.abs(pt[16]._x - pt[30]._x) / pt[30]._x);
const angleInRadians = Math.atan(midToBaseLength / base);
const angleInDegrees = Math.floor(degrees(angleInRadians)); // yaw is face move from up to down
// Account for directionality. // comparing size of the box around the face with top and bottom of detected landmarks
// pitch forwards (_i.e. tilting your head forwards) is positive (or no sign); backward is negative. // silly hack, but this gives us face compression on y-axis
const direction = baseCoords.y - midPoint.y < 0 ? -1 : 1; // e.g., tilting head up hides the forehead that doesn't have any landmarks so ratio drops
const result = angleInDegrees * direction; const bottom = pt.reduce((prev, cur) => (prev < cur._y ? prev : cur._y), +Infinity);
return result; const top = pt.reduce((prev, cur) => (prev > cur._y ? prev : cur._y), -Infinity);
}; angle.yaw = Math.PI * (mesh._imgDims._height / (top - bottom) / 1.40 - 1);
if (!mesh || !mesh.positions || mesh.positions.length !== 68) return angle;
const pt = mesh.positions;
angle.roll = calcRoll(pt[27], pt[66]);
angle.pitch = calcPitch(pt[14], pt[30], pt[2]);
angle.yaw = calcYaw(pt[14], pt[33], pt[2]);
return angle; return angle;
} }
export function extendWithFaceLandmarks<TSource extends WithFaceDetection<{}>, TFaceLandmarks extends FaceLandmarks = FaceLandmarks68>( export function extendWithFaceLandmarks<
sourceObj: TSource, TSource extends WithFaceDetection<{}>,
unshiftedLandmarks: TFaceLandmarks, TFaceLandmarks extends FaceLandmarks = FaceLandmarks68 >(sourceObj: TSource, unshiftedLandmarks: TFaceLandmarks): WithFaceLandmarks<TSource, TFaceLandmarks> {
): WithFaceLandmarks<TSource, TFaceLandmarks> {
const { box: shift } = sourceObj.detection; const { box: shift } = sourceObj.detection;
const landmarks = unshiftedLandmarks.shiftBy<TFaceLandmarks>(shift.x, shift.y); const landmarks = unshiftedLandmarks.shiftBy<TFaceLandmarks>(shift.x, shift.y);
const rect = landmarks.align(); const rect = landmarks.align();
const { imageDims } = sourceObj.detection; const { imageDims } = sourceObj.detection;
const alignedRect = new FaceDetection( const alignedRect = new FaceDetection(sourceObj.detection.score, rect.rescale(imageDims.reverse()), imageDims);
sourceObj.detection.score,
rect.rescale(imageDims.reverse()),
imageDims,
);
const angle = calculateFaceAngle(unshiftedLandmarks); const angle = calculateFaceAngle(unshiftedLandmarks);
const extension = { landmarks, unshiftedLandmarks, alignedRect, angle };
const extension = {
landmarks,
unshiftedLandmarks,
alignedRect,
angle,
};
return { ...sourceObj, ...extension }; return { ...sourceObj, ...extension };
} }

View File

@ -21,7 +21,7 @@ export class ComputeFaceDescriptorsTaskBase<TReturn, TParentReturn> extends Comp
} }
export class ComputeAllFaceDescriptorsTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends ComputeFaceDescriptorsTaskBase<WithFaceDescriptor<TSource>[], TSource[]> { export class ComputeAllFaceDescriptorsTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends ComputeFaceDescriptorsTaskBase<WithFaceDescriptor<TSource>[], TSource[]> {
public override async run(): Promise<WithFaceDescriptor<TSource>[]> { public async run(): Promise<WithFaceDescriptor<TSource>[]> {
const parentResults = await this.parentTask; const parentResults = await this.parentTask;
const descriptors = await extractAllFacesAndComputeResults<TSource, Float32Array[]>( const descriptors = await extractAllFacesAndComputeResults<TSource, Float32Array[]>(
parentResults, parentResults,
@ -43,17 +43,20 @@ export class ComputeAllFaceDescriptorsTask<TSource extends WithFaceLandmarks<Wit
} }
export class ComputeSingleFaceDescriptorTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends ComputeFaceDescriptorsTaskBase<WithFaceDescriptor<TSource> | undefined, TSource | undefined> { export class ComputeSingleFaceDescriptorTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends ComputeFaceDescriptorsTaskBase<WithFaceDescriptor<TSource> | undefined, TSource | undefined> {
public override async run(): Promise<WithFaceDescriptor<TSource> | undefined> { public async run(): Promise<WithFaceDescriptor<TSource> | undefined> {
const parentResult = await this.parentTask; const parentResult = await this.parentTask;
if (!parentResult) return undefined; if (!parentResult) {
return undefined;
}
const descriptor = await extractSingleFaceAndComputeResult<TSource, Float32Array>( const descriptor = await extractSingleFaceAndComputeResult<TSource, Float32Array>(
parentResult, parentResult,
this.input, this.input,
(face) => nets.faceRecognitionNet.computeFaceDescriptor(face) as Promise<Float32Array>, (face) => nets.faceRecognitionNet.computeFaceDescriptor(face) as Promise<Float32Array>,
null, null,
// eslint-disable-next-line no-shadow, @typescript-eslint/no-shadow // eslint-disable-next-line no-shadow
(parentResult) => parentResult.landmarks.align(null, { useDlibAlignment: true }), (parentResult) => parentResult.landmarks.align(null, { useDlibAlignment: true }),
); );
return extendWithFaceDescriptor(parentResult, descriptor); return extendWithFaceDescriptor(parentResult, descriptor);
} }

View File

@ -33,18 +33,17 @@ export class DetectFaceLandmarksTaskBase<TReturn, TParentReturn> extends Composa
} }
export class DetectAllFaceLandmarksTask<TSource extends WithFaceDetection<{}>> extends DetectFaceLandmarksTaskBase<WithFaceLandmarks<TSource>[], TSource[]> { export class DetectAllFaceLandmarksTask<TSource extends WithFaceDetection<{}>> extends DetectFaceLandmarksTaskBase<WithFaceLandmarks<TSource>[], TSource[]> {
public override async run(): Promise<WithFaceLandmarks<TSource>[]> { public async run(): Promise<WithFaceLandmarks<TSource>[]> {
const parentResults = await this.parentTask; const parentResults = await this.parentTask;
const detections = parentResults.map((res) => res.detection); const detections = parentResults.map((res) => res.detection);
const faces: Array<HTMLCanvasElement | tf.Tensor3D> = this.input instanceof tf.Tensor const faces: Array<HTMLCanvasElement | tf.Tensor3D> = this.input instanceof tf.Tensor
? await extractFaceTensors(this.input, detections) ? await extractFaceTensors(this.input, detections)
: await extractFaces(this.input, detections); : await extractFaces(this.input, detections);
const faceLandmarksByFace = await Promise.all(faces.map((face) => this.landmarkNet.detectLandmarks(face))) as FaceLandmarks68[]; const faceLandmarksByFace = await Promise.all(
faces.map((face) => this.landmarkNet.detectLandmarks(face)),
) as FaceLandmarks68[];
faces.forEach((f) => f instanceof tf.Tensor && f.dispose()); faces.forEach((f) => f instanceof tf.Tensor && f.dispose());
const result = parentResults return parentResults.map((parentResult, i) => extendWithFaceLandmarks<TSource>(parentResult, faceLandmarksByFace[i]));
.filter((_parentResult, i) => faceLandmarksByFace[i])
.map((parentResult, i) => extendWithFaceLandmarks<TSource>(parentResult, faceLandmarksByFace[i]));
return result;
} }
withFaceExpressions() { withFaceExpressions() {
@ -61,7 +60,7 @@ export class DetectAllFaceLandmarksTask<TSource extends WithFaceDetection<{}>> e
} }
export class DetectSingleFaceLandmarksTask<TSource extends WithFaceDetection<{}>> extends DetectFaceLandmarksTaskBase<WithFaceLandmarks<TSource> | undefined, TSource | undefined> { export class DetectSingleFaceLandmarksTask<TSource extends WithFaceDetection<{}>> extends DetectFaceLandmarksTaskBase<WithFaceLandmarks<TSource> | undefined, TSource | undefined> {
public override async run(): Promise<WithFaceLandmarks<TSource> | undefined> { public async run(): Promise<WithFaceLandmarks<TSource> | undefined> {
const parentResult = await this.parentTask; const parentResult = await this.parentTask;
if (!parentResult) { if (!parentResult) {
return undefined; return undefined;

View File

@ -20,7 +20,7 @@ export class DetectFacesTaskBase<TReturn> extends ComposableTask<TReturn> {
} }
export class DetectAllFacesTask extends DetectFacesTaskBase<FaceDetection[]> { export class DetectAllFacesTask extends DetectFacesTaskBase<FaceDetection[]> {
public override async run(): Promise<FaceDetection[]> { public async run(): Promise<FaceDetection[]> {
const { input, options } = this; const { input, options } = this;
let result; let result;
if (options instanceof TinyFaceDetectorOptions) result = nets.tinyFaceDetector.locateFaces(input, options); if (options instanceof TinyFaceDetectorOptions) result = nets.tinyFaceDetector.locateFaces(input, options);
@ -62,7 +62,7 @@ export class DetectAllFacesTask extends DetectFacesTaskBase<FaceDetection[]> {
} }
export class DetectSingleFaceTask extends DetectFacesTaskBase<FaceDetection | undefined> { export class DetectSingleFaceTask extends DetectFacesTaskBase<FaceDetection | undefined> {
public override async run(): Promise<FaceDetection | undefined> { public async run(): Promise<FaceDetection | undefined> {
const faceDetections = await new DetectAllFacesTask(this.input, this.options); const faceDetections = await new DetectAllFacesTask(this.input, this.options);
let faceDetectionWithHighestScore = faceDetections[0]; let faceDetectionWithHighestScore = faceDetections[0];
faceDetections.forEach((faceDetection) => { faceDetections.forEach((faceDetection) => {

View File

@ -4,19 +4,38 @@ import { euclideanDistance } from '../euclideanDistance';
import { WithFaceDescriptor } from '../factories/index'; import { WithFaceDescriptor } from '../factories/index';
export class FaceMatcher { export class FaceMatcher {
private _labeledDescriptors: LabeledFaceDescriptors[]; private _labeledDescriptors: LabeledFaceDescriptors[]
private _distanceThreshold: number;
constructor(inputs: LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array | Array<LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array>, distanceThreshold = 0.6) { private _distanceThreshold: number
constructor(
inputs: LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array | Array<LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array>,
distanceThreshold = 0.6,
) {
this._distanceThreshold = distanceThreshold; this._distanceThreshold = distanceThreshold;
const inputArray = Array.isArray(inputs) ? inputs : [inputs]; const inputArray = Array.isArray(inputs) ? inputs : [inputs];
if (!inputArray.length) throw new Error('FaceRecognizer.constructor - expected atleast one input');
if (!inputArray.length) {
throw new Error('FaceRecognizer.constructor - expected atleast one input');
}
let count = 1; let count = 1;
const createUniqueLabel = () => `person ${count++}`; const createUniqueLabel = () => `person ${count++}`;
this._labeledDescriptors = inputArray.map((desc) => { this._labeledDescriptors = inputArray.map((desc) => {
if (desc instanceof LabeledFaceDescriptors) return desc; if (desc instanceof LabeledFaceDescriptors) {
if (desc instanceof Float32Array) return new LabeledFaceDescriptors(createUniqueLabel(), [desc]); return desc;
if (desc.descriptor && desc.descriptor instanceof Float32Array) return new LabeledFaceDescriptors(createUniqueLabel(), [desc.descriptor]); }
if (desc instanceof Float32Array) {
return new LabeledFaceDescriptors(createUniqueLabel(), [desc]);
}
if (desc.descriptor && desc.descriptor instanceof Float32Array) {
return new LabeledFaceDescriptors(createUniqueLabel(), [desc.descriptor]);
}
throw new Error('FaceRecognizer.constructor - expected inputs to be of type LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array | Array<LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array>'); throw new Error('FaceRecognizer.constructor - expected inputs to be of type LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array | Array<LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array>');
}); });
} }
@ -28,29 +47,36 @@ export class FaceMatcher {
public computeMeanDistance(queryDescriptor: Float32Array, descriptors: Float32Array[]): number { public computeMeanDistance(queryDescriptor: Float32Array, descriptors: Float32Array[]): number {
return descriptors return descriptors
.map((d) => euclideanDistance(d, queryDescriptor)) .map((d) => euclideanDistance(d, queryDescriptor))
.reduce((d1, d2) => d1 + d2, 0) / (descriptors.length || 1); .reduce((d1, d2) => d1 + d2, 0)
/ (descriptors.length || 1);
} }
public matchDescriptor(queryDescriptor: Float32Array): FaceMatch { public matchDescriptor(queryDescriptor: Float32Array): FaceMatch {
return this.labeledDescriptors return this.labeledDescriptors
.map(({ descriptors, label }) => new FaceMatch(label, this.computeMeanDistance(queryDescriptor, descriptors))) .map(({ descriptors, label }) => new FaceMatch(
label,
this.computeMeanDistance(queryDescriptor, descriptors),
))
.reduce((best, curr) => (best.distance < curr.distance ? best : curr)); .reduce((best, curr) => (best.distance < curr.distance ? best : curr));
} }
public findBestMatch(queryDescriptor: Float32Array): FaceMatch { public findBestMatch(queryDescriptor: Float32Array): FaceMatch {
const bestMatch = this.matchDescriptor(queryDescriptor); const bestMatch = this.matchDescriptor(queryDescriptor);
return (bestMatch.distance < this._distanceThreshold) ? bestMatch : new FaceMatch('unknown', bestMatch.distance); return bestMatch.distance < this.distanceThreshold
? bestMatch
: new FaceMatch('unknown', bestMatch.distance);
} }
public toJSON(): any { public toJSON(): any {
return { return {
distanceThreshold: this._distanceThreshold, distanceThreshold: this.distanceThreshold,
labeledDescriptors: this._labeledDescriptors.map((ld) => ld.toJSON()), labeledDescriptors: this.labeledDescriptors.map((ld) => ld.toJSON()),
}; };
} }
public static fromJSON(json: any): FaceMatcher { public static fromJSON(json: any): FaceMatcher {
const labeledDescriptors = json.labeledDescriptors.map((ld: any) => LabeledFaceDescriptors.fromJSON(ld)); const labeledDescriptors = json.labeledDescriptors
.map((ld: any) => LabeledFaceDescriptors.fromJSON(ld));
return new FaceMatcher(labeledDescriptors, json.distanceThreshold); return new FaceMatcher(labeledDescriptors, json.distanceThreshold);
} }
} }

View File

@ -27,7 +27,7 @@ export class PredictAgeAndGenderTaskBase<TReturn, TParentReturn> extends Composa
} }
export class PredictAllAgeAndGenderTask<TSource extends WithFaceDetection<{}>> extends PredictAgeAndGenderTaskBase<WithAge<WithGender<TSource>>[], TSource[]> { export class PredictAllAgeAndGenderTask<TSource extends WithFaceDetection<{}>> extends PredictAgeAndGenderTaskBase<WithAge<WithGender<TSource>>[], TSource[]> {
public override async run(): Promise<WithAge<WithGender<TSource>>[]> { public async run(): Promise<WithAge<WithGender<TSource>>[]> {
const parentResults = await this.parentTask; const parentResults = await this.parentTask;
const ageAndGenderByFace = await extractAllFacesAndComputeResults<TSource, AgeAndGenderPrediction[]>( const ageAndGenderByFace = await extractAllFacesAndComputeResults<TSource, AgeAndGenderPrediction[]>(
parentResults, parentResults,
@ -47,7 +47,7 @@ export class PredictAllAgeAndGenderTask<TSource extends WithFaceDetection<{}>> e
} }
export class PredictSingleAgeAndGenderTask<TSource extends WithFaceDetection<{}>> extends PredictAgeAndGenderTaskBase<WithAge<WithGender<TSource>> | undefined, TSource | undefined> { export class PredictSingleAgeAndGenderTask<TSource extends WithFaceDetection<{}>> extends PredictAgeAndGenderTaskBase<WithAge<WithGender<TSource>> | undefined, TSource | undefined> {
public override async run(): Promise<WithAge<WithGender<TSource>> | undefined> { public async run(): Promise<WithAge<WithGender<TSource>> | undefined> {
const parentResult = await this.parentTask; const parentResult = await this.parentTask;
if (!parentResult) return undefined; if (!parentResult) return undefined;
const { age, gender, genderProbability } = await extractSingleFaceAndComputeResult<TSource, AgeAndGenderPrediction>( const { age, gender, genderProbability } = await extractSingleFaceAndComputeResult<TSource, AgeAndGenderPrediction>(
@ -65,7 +65,7 @@ export class PredictSingleAgeAndGenderTask<TSource extends WithFaceDetection<{}>
} }
export class PredictAllAgeAndGenderWithFaceAlignmentTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends PredictAllAgeAndGenderTask<TSource> { export class PredictAllAgeAndGenderWithFaceAlignmentTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends PredictAllAgeAndGenderTask<TSource> {
override withFaceExpressions() { withFaceExpressions() {
return new PredictAllFaceExpressionsWithFaceAlignmentTask(this, this.input); return new PredictAllFaceExpressionsWithFaceAlignmentTask(this, this.input);
} }
@ -75,7 +75,7 @@ export class PredictAllAgeAndGenderWithFaceAlignmentTask<TSource extends WithFac
} }
export class PredictSingleAgeAndGenderWithFaceAlignmentTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends PredictSingleAgeAndGenderTask<TSource> { export class PredictSingleAgeAndGenderWithFaceAlignmentTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends PredictSingleAgeAndGenderTask<TSource> {
override withFaceExpressions() { withFaceExpressions() {
return new PredictSingleFaceExpressionsWithFaceAlignmentTask(this, this.input); return new PredictSingleFaceExpressionsWithFaceAlignmentTask(this, this.input);
} }

View File

@ -26,7 +26,7 @@ export class PredictFaceExpressionsTaskBase<TReturn, TParentReturn> extends Comp
} }
export class PredictAllFaceExpressionsTask<TSource extends WithFaceDetection<{}>> extends PredictFaceExpressionsTaskBase<WithFaceExpressions<TSource>[], TSource[]> { export class PredictAllFaceExpressionsTask<TSource extends WithFaceDetection<{}>> extends PredictFaceExpressionsTaskBase<WithFaceExpressions<TSource>[], TSource[]> {
public override async run(): Promise<WithFaceExpressions<TSource>[]> { public async run(): Promise<WithFaceExpressions<TSource>[]> {
const parentResults = await this.parentTask; const parentResults = await this.parentTask;
const faceExpressionsByFace = await extractAllFacesAndComputeResults<TSource, FaceExpressions[]>( const faceExpressionsByFace = await extractAllFacesAndComputeResults<TSource, FaceExpressions[]>(
@ -49,7 +49,7 @@ export class PredictAllFaceExpressionsTask<TSource extends WithFaceDetection<{}>
} }
export class PredictSingleFaceExpressionsTask<TSource extends WithFaceDetection<{}>> extends PredictFaceExpressionsTaskBase<WithFaceExpressions<TSource> | undefined, TSource | undefined> { export class PredictSingleFaceExpressionsTask<TSource extends WithFaceDetection<{}>> extends PredictFaceExpressionsTaskBase<WithFaceExpressions<TSource> | undefined, TSource | undefined> {
public override async run(): Promise<WithFaceExpressions<TSource> | undefined> { public async run(): Promise<WithFaceExpressions<TSource> | undefined> {
const parentResult = await this.parentTask; const parentResult = await this.parentTask;
if (!parentResult) { if (!parentResult) {
return undefined; return undefined;
@ -71,7 +71,7 @@ export class PredictSingleFaceExpressionsTask<TSource extends WithFaceDetection<
} }
export class PredictAllFaceExpressionsWithFaceAlignmentTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends PredictAllFaceExpressionsTask<TSource> { export class PredictAllFaceExpressionsWithFaceAlignmentTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends PredictAllFaceExpressionsTask<TSource> {
override withAgeAndGender() { withAgeAndGender() {
return new PredictAllAgeAndGenderWithFaceAlignmentTask(this, this.input); return new PredictAllAgeAndGenderWithFaceAlignmentTask(this, this.input);
} }
@ -81,7 +81,7 @@ export class PredictAllFaceExpressionsWithFaceAlignmentTask<TSource extends With
} }
export class PredictSingleFaceExpressionsWithFaceAlignmentTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends PredictSingleFaceExpressionsTask<TSource> { export class PredictSingleFaceExpressionsWithFaceAlignmentTask<TSource extends WithFaceLandmarks<WithFaceDetection<{}>>> extends PredictSingleFaceExpressionsTask<TSource> {
override withAgeAndGender() { withAgeAndGender() {
return new PredictSingleAgeAndGenderWithFaceAlignmentTask(this, this.input); return new PredictSingleAgeAndGenderWithFaceAlignmentTask(this, this.input);
} }

View File

@ -22,7 +22,6 @@ export * from './euclideanDistance';
export * from './NeuralNetwork'; export * from './NeuralNetwork';
export * from './resizeResults'; export * from './resizeResults';
export const version = pkg.version as string; const node = (typeof process !== 'undefined');
const browser = (typeof navigator !== 'undefined') && (typeof navigator.userAgent !== 'undefined');
// set webgl defaults export const version = { faceapi: pkg.version as string, node, browser };
// if (browser) tf.ENV.set('WEBGL_USE_SHAPES_UNIFORMS', true);

View File

@ -8,24 +8,40 @@ import * as tf from '../../dist/tfjs.esm';
* both sides of the minor dimension oof the image. * both sides of the minor dimension oof the image.
* @returns The padded tensor with width === height. * @returns The padded tensor with width === height.
*/ */
export function padToSquare(imgTensor: tf.Tensor4D, isCenterImage = false): tf.Tensor4D { export function padToSquare(
imgTensor: tf.Tensor4D,
isCenterImage = false,
): tf.Tensor4D {
return tf.tidy(() => { return tf.tidy(() => {
const [height, width] = imgTensor.shape.slice(1); const [height, width] = imgTensor.shape.slice(1);
if (height === width) return imgTensor; if (height === width) {
return imgTensor;
}
const dimDiff = Math.abs(height - width); const dimDiff = Math.abs(height - width);
const paddingAmount = Math.round(dimDiff * (isCenterImage ? 0.5 : 1)); const paddingAmount = Math.round(dimDiff * (isCenterImage ? 0.5 : 1));
const paddingAxis = height > width ? 2 : 1; const paddingAxis = height > width ? 2 : 1;
const createPaddingTensor = (paddingAmountLocal: number): tf.Tensor => { const createPaddingTensor = (paddingAmountLocal: number): tf.Tensor => {
const paddingTensorShape = imgTensor.shape.slice(); const paddingTensorShape = imgTensor.shape.slice();
paddingTensorShape[paddingAxis] = paddingAmountLocal; paddingTensorShape[paddingAxis] = paddingAmountLocal;
return tf.fill(paddingTensorShape, 0, 'float32'); return tf.fill(paddingTensorShape, 0, 'float32');
}; };
const paddingTensorAppend = createPaddingTensor(paddingAmount); const paddingTensorAppend = createPaddingTensor(paddingAmount);
const remainingPaddingAmount = dimDiff - (paddingTensorAppend.shape[paddingAxis] as number); const remainingPaddingAmount = dimDiff - (paddingTensorAppend.shape[paddingAxis] as number);
const paddingTensorPrepend = isCenterImage && remainingPaddingAmount ? createPaddingTensor(remainingPaddingAmount) : null;
const tensorsToStack = [paddingTensorPrepend, imgTensor, paddingTensorAppend] const paddingTensorPrepend = isCenterImage && remainingPaddingAmount
? createPaddingTensor(remainingPaddingAmount)
: null;
const tensorsToStack = [
paddingTensorPrepend,
imgTensor,
paddingTensorAppend,
]
.filter((t) => !!t) .filter((t) => !!t)
.map((t) => tf.cast(t as tf.Tensor4D, 'float32')) as tf.Tensor4D[]; .map((t: tf.Tensor) => tf.cast(t, 'float32')) as tf.Tensor4D[];
return tf.concat(tensorsToStack, paddingAxis); return tf.concat(tensorsToStack, paddingAxis);
}); });
} }

View File

@ -4,11 +4,11 @@ export interface ISsdMobilenetv1Options {
} }
export class SsdMobilenetv1Options { export class SsdMobilenetv1Options {
protected _name = 'SsdMobilenetv1Options'; protected _name = 'SsdMobilenetv1Options'
private _minConfidence: number; private _minConfidence: number
private _maxResults: number; private _maxResults: number
constructor({ minConfidence, maxResults }: ISsdMobilenetv1Options = {}) { constructor({ minConfidence, maxResults }: ISsdMobilenetv1Options = {}) {
this._minConfidence = minConfidence || 0.5; this._minConfidence = minConfidence || 0.5;

View File

@ -1,15 +1,7 @@
/** /* eslint-disable import/no-extraneous-dependencies */
* Creates tfjs bundle used by Human browser build target /* eslint-disable node/no-unpublished-import */
* @external
*/ // wrapper to load tfjs in a single place so version can be changed quickly
// export all from build bundle
export * from '@tensorflow/tfjs/dist/index.js'; export * from '@tensorflow/tfjs/dist/index.js';
export * from '@tensorflow/tfjs-backend-webgl/dist/index.js'; export * from '@tensorflow/tfjs-backend-wasm';
export * from '@tensorflow/tfjs-backend-wasm/dist/index.js';
// add webgpu to bundle, experimental
// export * from '@tensorflow/tfjs-backend-webgpu/dist/index.js';
// export versions, overrides version object from @tensorflow/tfjs
export { version } from '../../dist/tfjs.version.js';

Some files were not shown because too many files have changed in this diff Show More