Compare commits

214 Commits:

189226d63a, f587b44f66, e3f11b8533, 171d17cadf, e4cdf624c9, c633f9fbe4, ffc3c40362, a8193f9077,
155f07dccd, 2f0469fe6e, 697b265337, 4719b81587, fc9a39ea13, 438897c5a2, f4d4780267, a5c767fdff,
1fa29b0fd3, 472f2e4480, 4433ce44bc, 4ca829f941, 038349968c, ae96c7b230, f9f036ba01, 0736a99250,
3ea729badb, d36ed6d266, 4061d4d62f, b034c46f80, aefd776a9e, 20eb54beb4, e8301c5277, fba823ba50,
a1cb6de1e8, fb3836019f, 15ae496f40, 0009d1bc34, adc4b3a11d, 7e5a1289ff, cd2c553737, a433fc0681,
f9902b0459, bd5ab6bb0f, 96fed4f123, 0cbfd9b01b, dea225bbeb, 602e86cbec, 00bf49b24f, fa33c1281c,
7f613367a3, 4d65f459f9, d28e5d2142, 6aeb292453, 289faf17f2, 7a6f7d96b7, 870eebedfa, 1ed702f713,
b2a988e436, 5c38676a83, bac0ef10cf, 8baef0ef68, c5dbb9d4e9, a8021dc2a3, f946780bab, 8e7061a9aa,
cd904ca5dd, 496779fee2, 4ba4a99ee1, 31170e750b, 5f58cd376d, 07eb00d7d6, a1f7a0841f, 49a594a59b,
3b3ab219dc, 2fce7338dc, 6cafeafba1, d0f1349a23, cdb0e485f8, 5bcc4d2a73, 92008ed6f4, c1b38f99fe,
0c5251c219, fcf61e5c30, 8c7e21b1c9, 2841969df8, 39b137ed63, c53becfc67, fd427cce39, 43805b50c6,
fc18d89ab6, 0de113080c, 471ddb7549, 70991235df, c07be32e26, 936ecba7ec, 63476fcbc0, 62da12758f,
bd4d5935fe, 118fbaba4d, e70d9bb18b, f1a2ef34a5, e7fd0efd27, eb5501c672, 8b304fa3d4, 1824a62efb,
bd2317d42e, 1def723c7b, d78dd3aae1, 461e074993, 1d30a9f816, fcbfc8589a, c7b2c65c97, 1b4580dd6e,
fdddee7101, aee959f464, f70e5615b4, 4ba43e08ae, c3049e7c29, e2609a0ef2, d13586f549, 519e346f02,
efb307d230, 47f2b53e92, 9b810d8028, f48cbda416, ac172b8be5, 2c8c8c2c1c, 9fb3029211, 225192d18d,
8dab959446, 42d9d677de, d5b366629b, 1455c35c81, 953ef705ab, 00803107ce, 2ac6baa02b, 7ef748390c,
b4ba10898f, df47b3e2a9, 76daa38bce, e13a6d684b, da426d5cfd, 1de3551a0b, 98ea06fb0e, bf84748777,
25735fcb34, 7b8b30bfc9, 107297015e, b9c78b21b0, 1c577b6ede, b0d195dd57, f0aefed9e6, 158dbc6208,
b8830e8cd3, 1410be346a, 11b0685c9b, 5c13f14b05, 33fc169fa6, 47cb1aac88, e496c9789f, 6f9db4cd09,
3bce447141, 0304c9c2f1, 98b8963505, ab8478837d, 5f2aa0456c, 48b626b76c, cbeeca675d, 8e61c418e6,
9773e3557a, c188e2f9d8, 9cf903a5cf, 8942b0752c, 99c9ea0b75, ed465fc042, 05de572e79, 7615b6b234,
5e88795227, d1e5e71079, 62f123da0a, dd024e0ebf, c15e6a5ba4, efd2019e19, 40b3a65bdc, 23bdd3f086,
eaa298211e, f47c05cc13, 18a16a9f2c, 2ad4fc24db, 9ccaf781ab, 09698a891b, 1b68ca1160, d85c913347,
325e3852e7, 8053e5de99, 98c07fa123, 796ba2dda3, 4ea115ea0d, 4a6572f3ba, 090a1d9e4b, 5a1cc87be2,
748998c921, 2bbfd8490a, 3238d8b26c, 527c0de84c, ee6f3398a4, d29d073c4e, ad61f77ea2, 8bc4f095f4,
cd022855eb, e43d1b9472, de7ef14bf5, e22ba62899, 6e115bb37f, 863c6fcd7a
**New file**: build and dev-server configuration:

````diff
@@ -0,0 +1,148 @@
+{
+  "log": {
+    "enabled": false,
+    "debug": false,
+    "console": true,
+    "output": "build.log"
+  },
+  "profiles": {
+    "production": ["compile", "typings", "typedoc", "lint", "changelog"],
+    "development": ["serve", "watch", "compile"]
+  },
+  "clean": {
+    "locations": ["dist/*", "typedoc/*", "types/lib/src"]
+  },
+  "lint": {
+    "locations": [ "src/" ],
+    "rules": { }
+  },
+  "changelog": {
+    "log": "CHANGELOG.md"
+  },
+  "serve": {
+    "sslKey": "cert/https.key",
+    "sslCrt": "cert/https.crt",
+    "httpPort": 8000,
+    "httpsPort": 8001,
+    "documentRoot": ".",
+    "defaultFolder": "demo",
+    "defaultFile": "index.html"
+  },
+  "build": {
+    "global": {
+      "target": "es2018",
+      "treeShaking": true,
+      "ignoreAnnotations": true,
+      "sourcemap": false,
+      "banner": { "js": "/*\n  Face-API\n  homepage: <https://github.com/vladmandic/face-api>\n  author: <https://github.com/vladmandic>'\n*/\n" }
+    },
+    "targets": [
+      {
+        "name": "tfjs/browser/tf-version",
+        "platform": "browser",
+        "format": "esm",
+        "input": "src/tfjs/tf-version.ts",
+        "output": "dist/tfjs.version.js"
+      },
+      {
+        "name": "tfjs/node/cpu",
+        "platform": "node",
+        "format": "cjs",
+        "input": "src/tfjs/tf-node.ts",
+        "output": "dist/tfjs.esm.js",
+        "external": ["@tensorflow"]
+      },
+      {
+        "name": "faceapi/node/cpu",
+        "platform": "node",
+        "format": "cjs",
+        "input": "src/index.ts",
+        "output": "dist/face-api.node.js",
+        "external": ["@tensorflow"]
+      },
+      {
+        "name": "tfjs/node/gpu",
+        "platform": "node",
+        "format": "cjs",
+        "input": "src/tfjs/tf-node-gpu.ts",
+        "output": "dist/tfjs.esm.js",
+        "external": ["@tensorflow"]
+      },
+      {
+        "name": "faceapi/node/gpu",
+        "platform": "node",
+        "format": "cjs",
+        "input": "src/index.ts",
+        "output": "dist/face-api.node-gpu.js",
+        "external": ["@tensorflow"]
+      },
+      {
+        "name": "tfjs/node/wasm",
+        "platform": "node",
+        "format": "cjs",
+        "input": "src/tfjs/tf-node-wasm.ts",
+        "output": "dist/tfjs.esm.js",
+        "external": ["@tensorflow"]
+      },
+      {
+        "name": "faceapi/node/wasm",
+        "platform": "node",
+        "format": "cjs",
+        "input": "src/index.ts",
+        "output": "dist/face-api.node-wasm.js",
+        "external": ["@tensorflow"]
+      },
+      {
+        "name": "tfjs/browser/esm/nobundle",
+        "platform": "browser",
+        "format": "esm",
+        "input": "src/tfjs/tf-browser.ts",
+        "output": "dist/tfjs.esm.js",
+        "external": ["@tensorflow"]
+      },
+      {
+        "name": "faceapi/browser/esm/nobundle",
+        "platform": "browser",
+        "format": "esm",
+        "input": "src/index.ts",
+        "output": "dist/face-api.esm-nobundle.js",
+        "external": ["@tensorflow"]
+      },
+      {
+        "name": "tfjs/browser/esm/bundle",
+        "platform": "browser",
+        "format": "esm",
+        "input": "src/tfjs/tf-browser.ts",
+        "output": "dist/tfjs.esm.js"
+      },
+      {
+        "name": "faceapi/browser/iife/bundle",
+        "platform": "browser",
+        "format": "iife",
+        "globalName": "faceapi",
+        "minify": true,
+        "input": "src/index.ts",
+        "output": "dist/face-api.js",
+        "external": ["@tensorflow"]
+      },
+      {
+        "name": "faceapi/browser/esm/bundle",
+        "platform": "browser",
+        "format": "esm",
+        "sourcemap": true,
+        "input": "src/index.ts",
+        "output": "dist/face-api.esm.js",
+        "typings": "types/lib",
+        "typedoc": "typedoc",
+        "external": ["@tensorflow"]
+      }
+    ]
+  },
+  "watch": {
+    "enabled": true,
+    "locations": [ "src/**" ]
+  },
+  "typescript": {
+    "allowJs": false
+  }
+}
````
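Each entry under `build.targets` above describes one bundle: an input, an output, a platform, and a module format, with `@tensorflow` kept external for the non-bundled builds. As an illustration of the semantics only, here is a hedged sketch of how a single target could be expressed with esbuild's JS API directly; the actual toolchain is the author's own build tool, so this mapping is an assumption, not the project's build code:

```js
// Illustrative only: maps one "targets" entry onto esbuild options
const esbuild = require('esbuild'); // assumes esbuild is installed

const target = { // copied from the "faceapi/node/cpu" target above
  name: 'faceapi/node/cpu',
  platform: 'node',
  format: 'cjs',
  input: 'src/index.ts',
  output: 'dist/face-api.node.js',
};

esbuild.build({
  entryPoints: [target.input],
  outfile: target.output,
  platform: target.platform,   // 'browser' or 'node'
  format: target.format,       // 'esm', 'cjs' or 'iife'
  bundle: true,
  target: 'es2018',            // from build.global.target
  treeShaking: true,           // from build.global.treeShaking
  sourcemap: false,            // from build.global.sourcemap
  external: ['@tensorflow/*'], // keeps tfjs out of the bundle, as "external": ["@tensorflow"] intends
}).then(() => console.log('built:', target.name));
```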
**Modified**: ESLint configuration:

````diff
@@ -3,50 +3,74 @@
   "env": {
     "browser": true,
     "commonjs": true,
-    "es6": true,
     "node": true,
     "es2020": true
   },
   "parser": "@typescript-eslint/parser",
-  "parserOptions": { "ecmaVersion": 2020 },
-  "plugins": ["@typescript-eslint"],
+  "parserOptions": { "ecmaVersion": "latest" },
+  "plugins": [
+    "@typescript-eslint"
+  ],
   "extends": [
     "eslint:recommended",
     "plugin:import/errors",
     "plugin:import/warnings",
-    "plugin:import/typescript",
     "plugin:node/recommended",
     "plugin:promise/recommended",
-    "plugin:json/recommended-with-comments",
+    "plugin:@typescript-eslint/eslint-recommended",
+    "plugin:@typescript-eslint/recommended",
     "airbnb-base"
   ],
   "ignorePatterns": [ "node_modules", "types" ],
-  "settings": {
-    "import/resolver": {
-      "node": {
-        "extensions": [".js", ".ts"]
-      }
-    }
-  },
   "rules": {
-    "max-len": [1, 275, 3],
+    "@typescript-eslint/no-explicit-any": "off",
+    "@typescript-eslint/ban-types": "off",
+    "@typescript-eslint/ban-ts-comment": "off",
+    "@typescript-eslint/explicit-module-boundary-types": "off",
+    "@typescript-eslint/no-var-requires": "off",
+    "@typescript-eslint/no-empty-object-type": "off",
+    "@typescript-eslint/no-require-imports": "off",
     "camelcase": "off",
     "class-methods-use-this": "off",
+    "default-param-last": "off",
+    "dot-notation": "off",
+    "func-names": "off",
+    "guard-for-in": "off",
     "import/extensions": "off",
-    "import/no-cycle": "off",
+    "import/no-extraneous-dependencies": "off",
+    "import/no-named-as-default": "off",
+    "import/no-unresolved": "off",
     "import/prefer-default-export": "off",
+    "lines-between-class-members": "off",
+    "max-len": [1, 275, 3],
+    "newline-per-chained-call": "off",
+    "no-async-promise-executor": "off",
     "no-await-in-loop": "off",
+    "no-bitwise": "off",
+    "no-case-declarations":"off",
     "no-continue": "off",
+    "no-loop-func": "off",
     "no-mixed-operators": "off",
-    "no-param-reassign": "off",
+    "no-param-reassign":"off",
     "no-plusplus": "off",
+    "no-regex-spaces": "off",
+    "no-restricted-globals": "off",
     "no-restricted-syntax": "off",
     "no-return-assign": "off",
     "no-underscore-dangle": "off",
-    "node/no-missing-import": "off",
+    "no-promise-executor-return": "off",
+    "node/no-missing-import": ["error", { "tryExtensions": [".js", ".json", ".ts"] }],
+    "node/no-unpublished-import": "off",
+    "node/no-unpublished-require": "off",
     "node/no-unsupported-features/es-syntax": "off",
+    "no-lonely-if": "off",
+    "node/shebang": "off",
+    "object-curly-newline": "off",
     "prefer-destructuring": "off",
-    "radix": "off",
-    "object-curly-newline": "off"
+    "prefer-template":"off",
+    "promise/always-return": "off",
+    "promise/catch-or-return": "off",
+    "promise/no-nesting": "off",
+    "radix": "off"
   }
 }
````
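One functional change worth noting: `node/no-missing-import` goes from `"off"` to enabled with a `tryExtensions` override. A brief hypothetical illustration of why the override matters in a TypeScript codebase (file names here are made up for the example):

```js
// Hypothetical file: src/example.ts
// Without ".ts" in tryExtensions, eslint-plugin-node flags this import as
// missing, because only src/utils.ts exists on disk (not src/utils.js);
// adding ".ts" lets the rule resolve TypeScript sources while staying enabled
import { something } from './utils';
```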
**New file**: funding configuration:

````diff
@@ -0,0 +1,13 @@
+# These are supported funding model platforms
+
+github: [vladmandic]
+patreon: # Replace with a single Patreon username
+open_collective: # Replace with a single Open Collective username
+ko_fi: # Replace with a single Ko-fi username
+tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
+community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
+liberapay: # Replace with a single Liberapay username
+issuehunt: # Replace with a single IssueHunt username
+otechie: # Replace with a single Otechie username
+lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
+custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
````
**Removed**: previous issue-reporting note:

````diff
@@ -1 +0,0 @@
-Please include output of `faceapi.version` object or specify details about your version and platform (OS, NodeJS version, Browser version).
````
**New file**: issue template:

````diff
@@ -0,0 +1,28 @@
+---
+name: Issue
+about: Issue
+title: ''
+labels: ''
+assignees: vladmandic
+
+---
+
+**Issue Description**
+
+**Steps to Reproduce**
+
+**Expected Behavior**
+
+**Environment**
+
+- Module version?
+- Built-in demo or custom code?
+- Type of module used (e.g. `js`, `esm`, `esm-nobundle`)?
+- Browser or NodeJS and version (e.g. NodeJS 14.15 or Chrome 89)?
+- OS and Hardware platform (e.g. Windows 10, Ubuntu Linux on x64, Android 10)?
+- Packager (if any) (e.g, webpack, rollup, parcel, esbuild, etc.)?
+
+**Additional**
+
+- For installation or startup issues include your `package.json`
+- For usage issues, it is recommended to post your code as [gist](https://gist.github.com/)
````
**New file**: pull request template:

````diff
@@ -0,0 +1,3 @@
+# Pull Request Template
+
+<br>
````
**New file**: CodeQL analysis workflow:

````diff
@@ -0,0 +1,67 @@
+# For most projects, this workflow file will not need changing; you simply need
+# to commit it to your repository.
+#
+# You may wish to alter this file to override the set of languages analyzed,
+# or to provide custom queries or build logic.
+#
+# ******** NOTE ********
+# We have attempted to detect the languages in your repository. Please check
+# the `language` matrix defined below to confirm you have the correct set of
+# supported CodeQL languages.
+#
+name: "CodeQL"
+
+on:
+  push:
+    branches: [ master ]
+  pull_request:
+    # The branches below must be a subset of the branches above
+    branches: [ master ]
+  schedule:
+    - cron: '21 6 * * 0'
+
+jobs:
+  analyze:
+    name: Analyze
+    runs-on: ubuntu-latest
+
+    strategy:
+      fail-fast: false
+      matrix:
+        language: [ 'javascript' ]
+        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
+        # Learn more:
+        # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
+
+    steps:
+    - name: Checkout repository
+      uses: actions/checkout@v2
+
+    # Initializes the CodeQL tools for scanning.
+    - name: Initialize CodeQL
+      uses: github/codeql-action/init@v1
+      with:
+        languages: ${{ matrix.language }}
+        # If you wish to specify custom queries, you can do so here or in a config file.
+        # By default, queries listed here will override any specified in a config file.
+        # Prefix the list here with "+" to use these queries and those in the config file.
+        # queries: ./path/to/local/query, your-org/your-repo/queries@main
+
+    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
+    # If this step fails, then you should remove it and run the build manually (see below)
+    - name: Autobuild
+      uses: github/codeql-action/autobuild@v1
+
+    # ℹ️ Command-line programs to run using the OS shell.
+    # 📚 https://git.io/JvXDl
+
+    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
+    #    and modify them (or add more) to build your code if your project
+    #    uses a compiled language
+
+    #- run: |
+    #   make bootstrap
+    #   make release
+
+    - name: Perform CodeQL Analysis
+      uses: github/codeql-action/analyze@v1
````
**Modified**: ignore rules:

````diff
@@ -1 +1,2 @@
 node_modules
+pnpm-lock.yaml
````
**New file**: webhint configuration:

````diff
@@ -0,0 +1,13 @@
+{
+  "extends": [
+    "web-recommended"
+  ],
+  "browserslist": [
+    "last 1 versions",
+    "not ie < 20"
+  ],
+  "hints": {
+    "no-inline-styles": "off",
+    "meta-charset-utf-8": "off"
+  }
+}
````
**New file**: additional ignore rules:

````diff
@@ -0,0 +1,5 @@
+node_modules
+pnpm-lock.yaml
+typedoc
+test
+types/lib
````
**New file**: npm configuration:

````diff
@@ -0,0 +1,5 @@
+force=true
+production=true
+legacy-peer-deps=true
+strict-peer-dependencies=false
+node-options='--no-deprecation'
````
**New file**: editor settings:

````diff
@@ -0,0 +1,3 @@
+{
+  "typescript.tsdk": "node_modules/typescript/lib"
+}
````
**CHANGELOG.md** (360 changes)
````diff
@@ -1,123 +1,317 @@
 # @vladmandic/face-api
 
-Version: **1.0.1**
-Description: **FaceAPI: AI-powered Face Detection, Face Embedding & Recognition Using Tensorflow/JS**
+Version: **1.7.15**
+Description: **FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS**
 
 Author: **Vladimir Mandic <mandic00@live.com>**
-License: **MIT** </LICENSE>
-Repository: **<git+https://github.com/vladmandic/face-api.git>**
+License: **MIT**
+Repository: **<https://github.com/vladmandic/face-api>**
 
 ## Changelog
 
+### **1.7.15** 2025/02/05 mandic00@live.com
+
+### **origin/master** 2024/09/10 mandic00@live.com
+
+### **1.7.14** 2024/09/10 mandic00@live.com
+
+- rebuild
+- merge pull request #188 from rebser/master
+- fixing leaking eventhandlers when using htmlcanvaselement
+- rebuild types
+- rebuild
+
+### **1.7.13** 2024/01/17 mandic00@live.com
+
+- merge pull request #186 from khwalkowicz/master
+- feat: enable noimplicitany
+
+### **release: 1.7.12** 2023/06/12 mandic00@live.com
+
+### **1.7.12** 2023/06/12 mandic00@live.com
+
+### **1.7.11** 2023/05/08 mandic00@live.com
+
+### **1.7.10** 2023/03/21 mandic00@live.com
+
+- change typedefs
+
+### **1.7.9** 2023/01/29 mandic00@live.com
+
+### **1.7.8** 2023/01/06 mandic00@live.com
+
+### **1.7.7** 2022/12/01 mandic00@live.com
+
+### **1.7.6** 2022/10/18 mandic00@live.com
+
+- fix face angles (yaw, pitch, & roll) accuracy (#130)
+
+### **1.7.5** 2022/10/09 mandic00@live.com
+
+- create funding.yml
+- add node-wasm demo
+
+### **1.7.4** 2022/09/25 mandic00@live.com
+
+- improve face compare performance
+
+### **1.7.3** 2022/08/24 mandic00@live.com
+
+- refresh release
+
+### **1.7.2** 2022/08/23 mandic00@live.com
+
+- document and remove optional dependencies
+
+### **release: 1.7.1** 2022/07/25 mandic00@live.com
+
+### **1.7.1** 2022/07/25 mandic00@live.com
+
+- refactor dependencies
+- full rebuild
+
+### **1.6.11** 2022/05/24 mandic00@live.com
+
+### **1.6.10** 2022/05/24 mandic00@live.com
+
+### **1.6.9** 2022/05/18 mandic00@live.com
+
+### **1.6.8** 2022/05/09 mandic00@live.com
+
+- exclude impossible detected face boxes
+
+### **1.6.7** 2022/04/01 mandic00@live.com
+
+- fixed typo error (#97)
+
+### **1.6.6** 2022/03/04 mandic00@live.com
+
+### **1.6.5** 2022/02/07 mandic00@live.com
+
+### **1.6.4** 2022/01/14 mandic00@live.com
+
+- add node with wasm build target
+
+### **1.6.3** 2022/01/06 mandic00@live.com
+
+### **1.6.2** 2022/01/01 mandic00@live.com
+
+### **1.6.1** 2021/12/09 mandic00@live.com
+
+- rebuild
+- release preview
+- switch to custom tfjs and new typedefs
+- rebuild
+
+### **1.5.8** 2021/11/30 mandic00@live.com
+
+### **1.5.7** 2021/10/28 mandic00@live.com
+
+### **1.5.6** 2021/10/22 mandic00@live.com
+
+### **release: 1.5.5** 2021/10/19 mandic00@live.com
+
+### **1.5.5** 2021/10/19 mandic00@live.com
+
+- allow backend change in demo via url params
+- add node-match demo
+- fix face matcher
+
+### **1.5.4** 2021/09/29 mandic00@live.com
+
+### **1.5.3** 2021/09/16 mandic00@live.com
+
+- simplify tfjs imports
+- reduce bundle size
+- enable webgl uniforms
+
+### **1.5.2** 2021/09/10 mandic00@live.com
+
+- redesign build platform
+
+### **1.5.1** 2021/09/08 mandic00@live.com
+
+### **1.4.2** 2021/08/31 mandic00@live.com
+
+### **release: 1.4.1** 2021/07/29 mandic00@live.com
+
+### **1.4.1** 2021/07/29 mandic00@live.com
+
+### **release: 1.3.1** 2021/06/18 mandic00@live.com
+
+### **1.3.1** 2021/06/08 mandic00@live.com
+
+- fix face expression detection (#56)
+- add buffertovideo
+- fix git conflicts
+- fix tsc error (#55)
+- force typescript 4.2 due to typedoc incompatibility with ts 4.3
+
+### **1.2.5** 2021/05/27 mandic00@live.com
+
+- add buffertovideo and fetchvideo (#54)
+
+### **1.2.4** 2021/05/18 mandic00@live.com
+
+### **1.2.3** 2021/05/04 mandic00@live.com
+
+### **update for tfjs 3.6.0** 2021/04/30 mandic00@live.com
+
+### **1.2.2** 2021/04/30 mandic00@live.com
+
+- add node-wasm demo
+- accept uri as input to demo node and node-canvas
+- major version full rebuild
+
+### **1.2.1** 2021/04/22 mandic00@live.com
+
+- add npmrc
+- add canvas/image based demo to decode webp
+
+### **1.1.12** 2021/04/13 mandic00@live.com
+
+### **1.1.11** 2021/04/06 mandic00@live.com
+
+- merge pull request #46 from mayankagarwals/demo_latencytest_fix
+- fixed bug which led to latency not being measured and wrong output on console for demo
+- add cdn links
+
+### **1.1.10** 2021/04/04 mandic00@live.com
+
+- added webhints
+
+### **1.1.9** 2021/04/03 mandic00@live.com
+
+- fix linting and tests
+
+### **1.1.8** 2021/04/01 mandic00@live.com
+
+### **1.1.7** 2021/03/31 mandic00@live.com
+
+- enable minify
+
+### **1.1.6** 2021/03/26 mandic00@live.com
+
+### **1.1.5** 2021/03/23 mandic00@live.com
+
+- add node-canvas demo
+- refactoring
+
+### **1.1.4** 2021/03/18 mandic00@live.com
+
+### **1.1.3** 2021/03/16 mandic00@live.com
+
+- fix for seedrandom
+
+### **1.1.2** 2021/03/15 mandic00@live.com
+
+- create templates
+- create codeql-analysis.yml
+
+### **1.1.1** 2021/03/14 mandic00@live.com
+
+- full rebuild
+- reformatted model manifests and weights
+- create api specs
+
+### **1.0.2** 2021/03/09 mandic00@live.com
+
+### **release: 1.0.1** 2021/03/09 mandic00@live.com
+
 ### **1.0.1** 2021/03/09 mandic00@live.com
 
 - add badges
 - optimize for npm
+- 0.30.6
-
-### **0.30.6** 2021/03/08 mandic00@live.com
-
 - added typings for face angle
 - disable landmark printing
+- 0.30.5
-
-### **0.30.5** 2021/03/07 mandic00@live.com
-
 - enabled live demo on gitpages
+- 0.30.4
-
-### **0.30.4** 2021/03/07 mandic00@live.com
-
 - added face angle calculations
 - added documentation
 - package update
+- 0.30.3
+- 0.30.2
+- 0.30.1
+- 0.13.3
-
-### **0.30.3** 2021/03/04 mandic00@live.com
-
-### **0.30.2** 2021/02/26 mandic00@live.com
-
-### **0.30.1** 2021/02/25 mandic00@live.com
-
-### **0.13.3** 2021/02/21 mandic00@live.com
-
 - added note-cpu target
 - merge pull request #39 from xemle/feature/node-cpu
 - add node-cpu build for non supported systems of libtensorflow
+- 0.13.2
+- 0.13.1
+- 0.12.10
+- exception handling
+- 0.12.9
+- exception handling
+- 0.12.8
-
-### **0.13.2** 2021/02/20 mandic00@live.com
-
-### **0.13.1** 2021/02/20 mandic00@live.com
-
-### **0.12.10** 2021/02/20 mandic00@live.com
-
 - exception handling
-
-### **0.12.9** 2021/02/20 mandic00@live.com
-
-### **0.12.8** 2021/02/20 mandic00@live.com
-
 ### **0.12.7** 2021/02/17 mandic00@live.com
 
 - 0.12.7
+- 0.12.6
+- 0.12.5
+- 0.12.4
+- 0.12.3
+- 0.12.2
-
-### **0.12.6** 2021/02/13 mandic00@live.com
-
-### **0.12.5** 2021/02/12 mandic00@live.com
-
-### **0.12.4** 2021/02/06 mandic00@live.com
-
-### **0.12.3** 2021/02/06 mandic00@live.com
-
-### **0.12.2** 2021/02/02 mandic00@live.com
-
 ### **update for tfjs 3.0.0** 2021/01/29 mandic00@live.com
 
+- 0.12.1
-
-### **0.12.1** 2021/01/29 mandic00@live.com
-
 - rebuild
+- 0.11.6
-
-### **0.11.6** 2021/01/24 mandic00@live.com
-
 - add check for null face descriptor
 - merge pull request #34 from patrickhulce/patch-1
 - fix: return empty descriptor for zero-sized faces
+- 0.11.5
+- 0.11.4
+- 0.11.3
-
-### **0.11.5** 2021/01/22 mandic00@live.com
-
-### **0.11.4** 2021/01/22 mandic00@live.com
-
-### **0.11.3** 2021/01/20 mandic00@live.com
-
 - fix typo
 - enable full minification
+- 0.11.2
-
-### **0.11.2** 2021/01/12 mandic00@live.com
-
 - full rebuild
+- 0.11.1
-
-### **0.11.1** 2021/01/10 mandic00@live.com
-
 - added live webcam demo
+- 0.10.2
-
-### **0.10.2** 2021/01/03 mandic00@live.com
-
 - ts linting
 - version bump
+- 0.10.1
-
-### **0.10.1** 2020/12/23 mandic00@live.com
-
 - full re-lint and typings generation
 - rebuild
````
**New file**: code of conduct:

````diff
@@ -0,0 +1,24 @@
+# Code of Conduct
+
+Use your best judgement
+If it will possibly make others uncomfortable, do not post it
+
+- Be respectful
+  Disagreement is not an opportunity to attack someone else's thoughts or opinions
+  Although views may differ, remember to approach every situation with patience and care
+- Be considerate
+  Think about how your contribution will affect others in the community
+- Be open minded
+  Embrace new people and new ideas. Our community is continually evolving and we welcome positive change
+
+Be mindful of your language
+Any of the following behavior is unacceptable:
+
+- Offensive comments of any kind
+- Threats or intimidation
+- Sexually explicit material
+- Or any other kinds of harassment
+
+If you believe someone is violating the code of conduct, we ask that you report it
+
+Participants asked to stop any harassing behavior are expected to comply immediately
````
**New file**: contributing guidelines:

````diff
@@ -0,0 +1,17 @@
+# Contributing Guidelines
+
+Pull requests from everyone are welcome
+
+Procedure for contributing:
+
+- Create a fork of the repository on github
+  In the top right corner of the GitHub page, select "Fork"
+- Clone your forked repository to your local system
+  `git clone https://github.com/<your-username>/<your-fork>`
+- Make your changes
+- Test your changes against code guidelines
+  `npm run lint`
+- Push changes to your fork
+- Submit a PR (pull request)
+
+Your pull request will be reviewed and, pending review results, merged into main branch
````
**LICENSE** (2 changes)
````diff
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2018 Vincent Mühler
+Copyright (c) Vladimir Mandic
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
````
**README.md** (428 changes)
````diff
@@ -1,62 +1,26 @@
 ![Git Version](https://img.shields.io/github/package-json/v/vladmandic/face-api?style=flat-square&svg=true&label=git)
 ![NPM Version](https://img.shields.io/npm/v/@vladmandic/face-api.png?style=flat-square)
 ![Last Commit](https://img.shields.io/github/last-commit/vladmandic/face-api?style=flat-square&svg=true)
 ![License](https://img.shields.io/github/license/vladmandic/face-api?style=flat-square&svg=true)
 ![GitHub Status Checks](https://img.shields.io/github/checks-status/vladmandic/face-api/master?style=flat-square&svg=true)
+![Vulnerabilities](https://img.shields.io/github/search/vladmandic/face-api/badge?style=flat-square&svg=true)
 
 # FaceAPI
 
-**Face detection and recognition libary for Browser and NodeJS implemented on top of TensorFlow/JS**
+**AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS**
 
 <br>
 
-**Live Demo**: <https://vladmandic.github.io/face-api/example/webcam.html>
+**Live Demo**: <https://vladmandic.github.io/face-api/demo/webcam.html>
 
 <br>
 
-## Note
+## Additional Documentation
 
-This is updated **face-api.js** with latest available TensorFlow/JS as the original face-api.js is not compatible with **tfjs 2.0+**.
-
-Forked from [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2** which was released on March 22nd, 2020
-
-Currently based on **`TensorFlow/JS` 3.2.0**
-
-### Why?
-
-Because I needed Face-API that does not cause version conflict with newer TFJS 2.0 that I use accross my projects
-And since original Face-API was open-source, I've released this version as well
-
-Unfortunately, changes ended up being too large for a simple pull request on original Face-API and it ended up being a full-fledged version on its own
-
-### Differences
-
-- Compatible with `TensorFlow/JS 2.0+ & 3.0+`
-- Compatible with `WebGL`, `CPU` and `WASM` TFJS Browser backends
-- Compatible with both `tfjs-node` and `tfjs-node-gpu` TFJS NodeJS backends
-- Updated all type castings for TypeScript type checking to `TypeScript 4.1`
-- Switched bundling from `UMD` to `ESM` + `CommonJS` with fallback to `IIFE`
-  Resulting code is optimized per-platform instead of being universal
-  Fully tree shakable when imported as an `ESM` module
-  Browser bundle process uses `ESBuild` instead of `Rollup`
-- Typescript build process now targets `ES2018` and instead of dual ES5/ES6
-  Resulting code is clean ES2018 JavaScript without polyfills
-- Removed old tests, docs, examples
-- Removed old package dependencies (`karma`, `jasmine`, `babel`, etc.)
-- Updated all package dependencies
-- Updated TensorFlow/JS dependencies since backends were removed from `@tensorflow/tfjs-core`
-- Updated mobileNetv1 model due to `batchNorm()` dependency
-- Added `version` class that returns JSON object with version of FaceAPI as well as linked TFJS
-- Added test/dev built-in HTTP & HTTPS Web server
-- Removed `mtcnn` and `tinyYolov2` models as they were non-functional in latest public version of `Face-API`
-  *If there is a demand, I can re-implement them back.*
-- Added `face angle` calculations that returns `roll`, `yaw` and `pitch`
-
-Which means valid models are **tinyFaceDetector** and **mobileNetv1**
-
-<br>
-<hr>
-<br>
+- [**Tutorial**](TUTORIAL.md)
+- [**TypeDoc API Specification**](https://vladmandic.github.io/face-api/typedoc/index.html)
+
+<br><hr><br>
 
 ## Examples
 
````
````diff
@@ -64,70 +28,103 @@ Which means valid models are **tinyFaceDetector** and **mobileNetv1**
 ### Browser
 
-Browser example that uses static images and showcases both models as well as all of the extensions is included in `/example/index.html`
-Example can be accessed directly using Git pages using URL: <https://vladmandic.github.io/face-api/example/index.html>
+Browser example that uses static images and showcases both models
+as well as all of the extensions is included in `/demo/index.html`
+Example can be accessed directly using Git pages using URL:
+<https://vladmandic.github.io/face-api/demo/index.html>
 
-Browser example that uses live webcam is included in `/example/webcam.html`
-Example can be accessed directly using Git pages using URL: <https://vladmandic.github.io/face-api/example/webcam.html>
+Browser example that uses live webcam is included in `/demo/webcam.html`
+Example can be accessed directly using Git pages using URL:
+<https://vladmandic.github.io/face-api/demo/webcam.html>
 
 <br>
 
+**Demo using FaceAPI to process images**
 *Note: Photos shown below are taken by me*
 
 ![demo/screenshot](demo/screenshot.jpg)
 
+**Demo using FaceAPI to process live webcam**
+
+![demo/screenshot](demo/screenshot-webcam.jpg)
 
 <br>
 
 ### NodeJS
 
-Two NodeJS examples are:
+NodeJS examples are:
 
-- `/example/node-singleprocess.js`: Regular usage of `FaceAPI` from `NodeJS`
-- `/example/node-multiprocess.js`: Multiprocessing showcase that uses pool of worker processes (`node-multiprocess-worker.js`
-  Main starts fixed pool of worker processes with each worker having it's instance of `FaceAPI`
-  Workers communicate with main when they are ready and main dispaches job to each ready worker until job queue is empty
+- `/demo/node-simple.js`:
+  Simplest possible NodeJS demo for FaceAPI in under 30 lines of JavaScript code
+- `/demo/node.js`:
+  Using `TFJS` native methods to load images without external dependencies
+- `/demo/node-canvas.js` and `/demo/node-image.js`:
+  Using external `canvas` module to load images
+  Which also allows for image drawing and saving inside `NodeJS` environment
+- `/demo/node-match.js`:
+  Simple demo that compares face similarity from a given image
+  to a second image or list of images in a folder
+- `/demo/node-multiprocess.js`:
+  Multiprocessing showcase that uses pool of worker processes
+  (`node-multiprocess-worker.js`)
+  Main starts fixed pool of worker processes with each worker having
+  it's instance of `FaceAPI`
+  Workers communicate with main when they are ready and main dispaches
+  job to each ready worker until job queue is empty
 
 ```json
-2020-12-08 08:30:01 INFO: @vladmandic/face-api version 0.9.1
-2020-12-08 08:30:01 INFO: User: vlado Platform: linux Arch: x64 Node: v15.0.1
-2020-12-08 08:30:01 INFO: FaceAPI multi-process test
-2020-12-08 08:30:01 STATE: Main: started worker: 265238
-2020-12-08 08:30:01 STATE: Main: started worker: 265244
-2020-12-08 08:30:02 STATE: Worker: PID: 265238 TensorFlow/JS 2.7.0 FaceAPI 0.9.1 Backend: tensorflow
-2020-12-08 08:30:02 STATE: Worker: PID: 265244 TensorFlow/JS 2.7.0 FaceAPI 0.9.1 Backend: tensorflow
-2020-12-08 08:30:02 STATE: Main: dispatching to worker: 265238
-2020-12-08 08:30:02 STATE: Main: dispatching to worker: 265244
-2020-12-08 08:30:02 DATA: Worker received message: 265238 { image: 'example/sample (1).jpg' }
-2020-12-08 08:30:02 DATA: Worker received message: 265244 { image: 'example/sample (2).jpg' }
-2020-12-08 08:30:04 DATA: Main: worker finished: 265238 detected faces: 3
-2020-12-08 08:30:04 STATE: Main: dispatching to worker: 265238
-2020-12-08 08:30:04 DATA: Main: worker finished: 265244 detected faces: 3
-2020-12-08 08:30:04 STATE: Main: dispatching to worker: 265244
-2020-12-08 08:30:04 DATA: Worker received message: 265238 { image: 'example/sample (3).jpg' }
-2020-12-08 08:30:04 DATA: Worker received message: 265244 { image: 'example/sample (4).jpg' }
-2020-12-08 08:30:06 DATA: Main: worker finished: 265238 detected faces: 3
-2020-12-08 08:30:06 STATE: Main: dispatching to worker: 265238
-2020-12-08 08:30:06 DATA: Worker received message: 265238 { image: 'example/sample (5).jpg' }
-2020-12-08 08:30:06 DATA: Main: worker finished: 265244 detected faces: 4
-2020-12-08 08:30:06 STATE: Main: dispatching to worker: 265244
-2020-12-08 08:30:06 DATA: Worker received message: 265244 { image: 'example/sample (6).jpg' }
-2020-12-08 08:30:07 DATA: Main: worker finished: 265238 detected faces: 5
-2020-12-08 08:30:07 STATE: Main: worker exit: 265238 0
-2020-12-08 08:30:08 DATA: Main: worker finished: 265244 detected faces: 4
-2020-12-08 08:30:08 INFO: Processed 12 images in 6826 ms
-2020-12-08 08:30:08 STATE: Main: worker exit: 265244 0
+2021-03-14 08:42:03 INFO: @vladmandic/face-api version 1.0.2
+2021-03-14 08:42:03 INFO: User: vlado Platform: linux Arch: x64 Node: v15.7.0
+2021-03-14 08:42:03 INFO: FaceAPI multi-process test
+2021-03-14 08:42:03 STATE: Main: started worker: 1888019
+2021-03-14 08:42:03 STATE: Main: started worker: 1888025
+2021-03-14 08:42:04 STATE: Worker: PID: 1888025 TensorFlow/JS 3.3.0 FaceAPI 1.0.2 Backend: tensorflow
+2021-03-14 08:42:04 STATE: Worker: PID: 1888019 TensorFlow/JS 3.3.0 FaceAPI 1.0.2 Backend: tensorflow
+2021-03-14 08:42:04 STATE: Main: dispatching to worker: 1888019
+2021-03-14 08:42:04 STATE: Main: dispatching to worker: 1888025
+2021-03-14 08:42:04 DATA: Worker received message: 1888019 { image: 'demo/sample1.jpg' }
+2021-03-14 08:42:04 DATA: Worker received message: 1888025 { image: 'demo/sample2.jpg' }
+2021-03-14 08:42:06 DATA: Main: worker finished: 1888025 detected faces: 3
+2021-03-14 08:42:06 STATE: Main: dispatching to worker: 1888025
+2021-03-14 08:42:06 DATA: Worker received message: 1888025 { image: 'demo/sample3.jpg' }
+2021-03-14 08:42:06 DATA: Main: worker finished: 1888019 detected faces: 3
+2021-03-14 08:42:06 STATE: Main: dispatching to worker: 1888019
+2021-03-14 08:42:06 DATA: Worker received message: 1888019 { image: 'demo/sample4.jpg' }
+2021-03-14 08:42:07 DATA: Main: worker finished: 1888025 detected faces: 3
+2021-03-14 08:42:07 STATE: Main: dispatching to worker: 1888025
+2021-03-14 08:42:07 DATA: Worker received message: 1888025 { image: 'demo/sample5.jpg' }
+2021-03-14 08:42:08 DATA: Main: worker finished: 1888019 detected faces: 4
+2021-03-14 08:42:08 STATE: Main: dispatching to worker: 1888019
+2021-03-14 08:42:08 DATA: Worker received message: 1888019 { image: 'demo/sample6.jpg' }
+2021-03-14 08:42:09 DATA: Main: worker finished: 1888025 detected faces: 5
+2021-03-14 08:42:09 STATE: Main: worker exit: 1888025 0
+2021-03-14 08:42:09 DATA: Main: worker finished: 1888019 detected faces: 4
+2021-03-14 08:42:09 INFO: Processed 15 images in 5944 ms
+2021-03-14 08:42:09 STATE: Main: worker exit: 1888019 0
 ```
 
````
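The `node-match` demo listed above boils down to comparing 128-value face descriptors. The following is a hedged sketch of that flow, not the demo's actual code; the model folder and image paths are assumptions:

```js
// Sketch: descriptor-based face similarity in NodeJS (illustrative, not the bundled demo)
const fs = require('fs');
const tf = require('@tensorflow/tfjs-node');
const faceapi = require('@vladmandic/face-api/dist/face-api.node.js');

async function getDescriptor(file) {
  const tensor = tf.node.decodeImage(fs.readFileSync(file), 3); // decode jpg/png to an rgb tensor
  const result = await faceapi.detectSingleFace(tensor).withFaceLandmarks().withFaceDescriptor();
  tf.dispose(tensor);
  return result ? result.descriptor : null; // Float32Array of 128 values, or null if no face found
}

async function main() {
  await faceapi.nets.ssdMobilenetv1.loadFromDisk('model'); // face detector
  await faceapi.nets.faceLandmark68Net.loadFromDisk('model'); // landmarks, required for descriptors
  await faceapi.nets.faceRecognitionNet.loadFromDisk('model'); // descriptor network
  const desc1 = await getDescriptor('demo/sample1.jpg');
  const desc2 = await getDescriptor('demo/sample2.jpg');
  // lower distance means more similar; ~0.6 is a commonly used match threshold
  console.log('distance:', faceapi.euclideanDistance(desc1, desc2));
}

main();
```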
````diff
-Note that `@tensorflow/tfjs-node` or `@tensorflow/tfjs-node-gpu` must be installed before using NodeJS example
+### NodeJS Notes
+
+- Supported NodeJS versions are **14** up to **22**
+  NodeJS version **23** and higher are not supported due to incompatibility with TensorFlow/JS
+- `@tensorflow/tfjs-node` or `@tensorflow/tfjs-node-gpu`
+  must be installed before using any **NodeJS** examples
 
-<br>
-<hr>
-<br>
+<br><hr><br>
+
+## Quick Start
+
+Simply include latest version of `FaceAPI` directly from a CDN in your HTML:
+(pick one, `jsdelivr` or `unpkg`)
+
+```html
+<script src="https://cdn.jsdelivr.net/npm/@vladmandic/face-api/dist/face-api.js"></script>
+<script src="https://unpkg.dev/@vladmandic/face-api/dist/face-api.js"></script>
+```
 
 ## Installation
 
-Face-API ships with several pre-build versions of the library:
+`FaceAPI` ships with several pre-build versions of the library:
 
 - `dist/face-api.js`: IIFE format for client-side Browser execution
   *with* TFJS pre-bundled
````
````diff
@@ -139,8 +136,6 @@ Face-API ships with several pre-build versions of the library:
   *without* TFJS pre-bundled
 - `dist/face-api.node-gpu.js`: CommonJS format for server-side NodeJS execution
   *without* TFJS pre-bundled and optimized for CUDA GPU acceleration
-- `dist/face-api.node-cpu.js`: CommonJS format for server-side NodeJS execution
-  *without* TFJS pre-bundled and using JS engine for platforms where tensorflow binary library version is not available
 
 Defaults are:
````
````diff
@@ -154,29 +149,34 @@ Defaults are:
 Bundled `TFJS` can be used directly via export: `faceapi.tf`
 
-Reason for additional `nobundle` version is if you want to include a specific version of TFJS and not rely on pre-packaged one
+Reason for additional `nobundle` version is if you want to
+include a specific version of TFJS and not rely on pre-packaged one
 
-`FaceAPI` is compatible with TFJS 2.0+
+`FaceAPI` is compatible with TFJS 2.0+ and TFJS 3.0+
 
-All versions include `sourcemap` and `asset manifest`
+All versions include `sourcemap`
 
-<br>
-<hr>
-<br>
+<br><hr><br>
 
-There are several ways to use Face-API:
+There are several ways to use FaceAPI:
 
 ### 1. IIFE script
 
 *Recommened for quick tests and backward compatibility with older Browsers that do not support ESM such as IE*
 
 This is simplest way for usage within Browser
-Simply download `dist/face-api.js`, include it in your `HTML` file & it's ready to use
+Simply download `dist/face-api.js`, include it in your `HTML` file & it's ready to use:
 
 ```html
 <script src="dist/face-api.js"><script>
 ```
+
+Or skip the download and include it directly from a CDN:
+
+```html
+<script src="https://cdn.jsdelivr.net/npm/@vladmandic/face-api/dist/face-api.js"></script>
+```
 
 IIFE script bundles TFJS and auto-registers global namespace `faceapi` within Window object which can be accessed directly from a `<script>` tag or from your JS file.
 
 <br>
````
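Because the IIFE bundle exposes the `faceapi` global, usage from an inline script needs no imports. A minimal hedged sketch; the model location and element id below are assumptions:

```js
// Runs inside an async function in the page, after dist/face-api.js has loaded;
// the bundle registers `faceapi` on the Window object
console.log(faceapi.version); // versions of FaceAPI and the bundled TFJS
await faceapi.nets.tinyFaceDetector.loadFromUri('model'); // model folder served alongside the page
const input = document.getElementById('inputImage'); // an <img> element on the page
const detections = await faceapi.detectAllFaces(input, new faceapi.TinyFaceDetectorOptions());
console.log('detected faces:', detections.length);
```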
````diff
@@ -235,7 +235,7 @@ or to use non-bundled version
 *Recommended for NodeJS projects*
 
-*Node: Face-API for NodeJS does not bundle TFJS due to binary dependencies that are installed during TFJS installation*
+*Node: FaceAPI for NodeJS does not bundle TFJS due to binary dependencies that are installed during TFJS installation*
 
 Install with:
````
````diff
@@ -258,10 +258,10 @@ If you want to force CommonJS module instead of relying on `recommended` field:
 ```
 
 If you want to GPU Accelerated execution in NodeJS, you must have CUDA libraries already installed and working
-Then install appropriate version of `Face-API`:
+Then install appropriate version of `FaceAPI`:
 
 ```shell
-npm install @tensorflow/tfjs-node
+npm install @tensorflow/tfjs-node-gpu
 npm install @vladmandic/face-api
 ```
 
````
````diff
@@ -272,21 +272,30 @@ And then use with:
 const faceapi = require('@vladmandic/face-api/dist/face-api.node-gpu.js'); // this loads face-api version with correct bindings for tfjs-node-gpu
 ```
 
-If you want to use `Face-API` in a NodeJS on platforms where NodeJS binary libraries are not supported, you can use JavaScript CPU backend.
+If you want to use `FaceAPI` in a NodeJS on platforms where **tensorflow** binary libraries are not supported, you can use NodeJS **WASM** backend.
 
 ```shell
 npm install @tensorflow/tfjs
+npm install @tensorflow/tfjs-backend-wasm
 npm install @vladmandic/face-api
 ```
 
 And then use with:
 
 ```js
-const tf = require('@tensorflow/tfjs')
-const faceapi = require('@vladmandic/face-api/dist/face-api.node-cpu.js');
+const tf = require('@tensorflow/tfjs');
+const wasm = require('@tensorflow/tfjs-backend-wasm');
+const faceapi = require('@vladmandic/face-api/dist/face-api.node-wasm.js'); // use this when using face-api in dev mode
+wasm.setWasmPaths('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/');
+await tf.setBackend('wasm');
+await tf.ready();
+...
 ```
 
-If you want to use graphical functions inside NodeJS, you must provide appropriate graphical library as NodeJS does not include implementation for DOM elements such as HTMLImageElement or HTMLCanvasElement:
+If you want to use graphical functions inside NodeJS,
+you must provide appropriate graphical library as
+NodeJS does not include implementation for DOM elements
+such as HTMLImageElement or HTMLCanvasElement:
 
 Install `Canvas` for NodeJS:
````
````diff
@@ -304,20 +313,18 @@ const { Canvas, Image, ImageData } = canvas
 faceapi.env.monkeyPatch({ Canvas, Image, ImageData })
 ```
 
-<br>
-<hr>
-<br>
+<br><hr><br>
 
 ## Weights
 
-Pretrained models and their weights are includes in `./model`.
+Pretrained models and their weights are included in `./model`.
 
-<br>
-<hr>
-<br>
+<br><hr><br>
 
 ## Test & Dev Web Server
 
+To install development dependencies, use `npm install --production=false`
+
 Built-in test&dev web server can be started using
 
 ```shell
````
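Putting the canvas monkey-patch from the section above together with model loading and detection, a minimal end-to-end NodeJS sketch; paths are assumptions and this is not one of the bundled demos verbatim:

```js
const tf = require('@tensorflow/tfjs-node');
const canvas = require('canvas');
const faceapi = require('@vladmandic/face-api/dist/face-api.node.js');

// patch DOM element implementations that NodeJS lacks
const { Canvas, Image, ImageData } = canvas;
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });

async function main() {
  await faceapi.nets.ssdMobilenetv1.loadFromDisk('model'); // load detector weights from local folder
  const img = await canvas.loadImage('demo/sample1.jpg'); // decode image via node-canvas
  const detections = await faceapi.detectAllFaces(img); // default ssd mobilenet options
  console.log('detected faces:', detections.length);
}

main();
```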
````diff
@@ -326,26 +333,53 @@ npm run dev
 
 By default it starts HTTP server on port 8000 and HTTPS server on port 8001 and can be accessed as:
 
-- <https://localhost:8001/example/index.html>
-- <https://localhost:8001/example/webcam.html>
+- <https://localhost:8001/demo/index.html>
+- <https://localhost:8001/demo/webcam.html>
 
-```json
-2021-01-10 08:39:00 INFO: @vladmandic/face-api version 0.10.2
-2021-01-10 08:39:00 INFO: User: vlado Platform: linux Arch: x64 Node: v15.4.0
-2021-01-10 08:39:00 INFO: Build: file startup all target: es2018
-2021-01-10 08:39:00 STATE: HTTP server listening: 8000
-2021-01-10 08:39:00 STATE: HTTP2 server listening: 8001
-2021-01-10 08:39:00 STATE: Monitoring: [ 'package.json', 'config.js', 'example', 'src', [length]: 4 ]
-2021-01-10 08:39:00 STATE: Monitoring: [ 'package.json', 'config.js', 'example', 'src', [length]: 4 ]
-2021-01-10 08:39:01 STATE: Build for: browserBundle type: tfjs: { modules: 1253, moduleBytes: 3997175, imports: 7, importBytes: 276, outputBytes: 1565414, outputFiles: 'dist/tfjs.esm.js' }
-2021-01-10 08:39:01 STATE: Build for: browserBundle type: iife: { imports: 160, importBytes: 1797487, outputBytes: 1699552, outputFiles: 'dist/face-api.js' }
-2021-01-10 08:39:01 STATE: Build for: browserBundle type: esm: { imports: 160, importBytes: 1797487, outputBytes: 1697086, outputFiles: 'dist/face-api.esm.js' }
-2021-01-10 08:39:01 INFO: Compile: [ 'src/index.ts', [length]: 1 ]
+```js
+2022-01-14 09:56:19 INFO: @vladmandic/face-api version 1.6.4
+2022-01-14 09:56:19 INFO: User: vlado Platform: linux Arch: x64 Node: v17.2.0
+2022-01-14 09:56:19 INFO: Application: { name: '@vladmandic/face-api', version: '1.6.4' }
+2022-01-14 09:56:19 INFO: Environment: { profile: 'development', config: '.build.json', package: 'package.json', tsconfig: true, eslintrc: true, git: true }
+2022-01-14 09:56:19 INFO: Toolchain: { build: '0.6.7', esbuild: '0.14.11', typescript: '4.5.4', typedoc: '0.22.10', eslint: '8.6.0' }
+2022-01-14 09:56:19 INFO: Build: { profile: 'development', steps: [ 'serve', 'watch', 'compile' ] }
+2022-01-14 09:56:19 STATE: WebServer: { ssl: false, port: 8000, root: '.' }
+2022-01-14 09:56:19 STATE: WebServer: { ssl: true, port: 8001, root: '.', sslKey: 'build/cert/https.key', sslCrt: 'build/cert/https.crt' }
+2022-01-14 09:56:19 STATE: Watch: { locations: [ 'src/**', 'README.md', 'src/**', 'src/**' ] }
+2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/cpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 143, outputBytes: 1276 }
+2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/cpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node.js', files: 162, inputBytes: 234787, outputBytes: 175203 }
+2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/gpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-gpu.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 147, outputBytes: 1296 }
+2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/gpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-gpu.js', files: 162, inputBytes: 234807, outputBytes: 175219 }
+2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/wasm', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-wasm.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 185, outputBytes: 1367 }
+2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/wasm', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-wasm.js', files: 162, inputBytes: 234878, outputBytes: 175294 }
+2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/tf-version', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-version.ts', output: 'dist/tfjs.version.js', files: 1, inputBytes: 1063, outputBytes: 1662 }
+2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 2, inputBytes: 2172, outputBytes: 811 }
+2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm-nobundle.js', files: 162, inputBytes: 234322, outputBytes: 169437 }
+2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 11, inputBytes: 2172, outputBytes: 2444105 }
+2022-01-14 09:56:20 STATE: Compile: { name: 'faceapi/browser/iife/bundle', format: 'iife', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.js', files: 162, inputBytes: 2677616, outputBytes: 1252572 }
+2022-01-14 09:56:20 STATE: Compile: { name: 'faceapi/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm.js', files: 162, inputBytes: 2677616, outputBytes: 2435063 }
+2022-01-14 09:56:20 INFO: Listening...
+...
+2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/html', size: 1047, url: '/', remote: '::1' }
+2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/javascript', size: 6919, url: '/index.js', remote: '::1' }
+2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/javascript', size: 2435063, url: '/dist/face-api.esm.js', remote: '::1' }
+2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 4125244, url: '/dist/face-api.esm.js.map', remote: '::1' }
+2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 3219, url: '/model/tiny_face_detector_model-weights_manifest.json', remote: '::1' }
+2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 193321, url: '/model/tiny_face_detector_model.bin', remote: '::1' }
+2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 28233, url: '/model/ssd_mobilenetv1_model-weights_manifest.json', remote: '::1' }
+2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 5616957, url: '/model/ssd_mobilenetv1_model.bin', remote: '::1' }
````
||||||
|
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 5616957, url: '/model/ssd_mobilenetv1_model.bin', remote: '::1' }
|
||||||
|
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 8392, url: '/model/age_gender_model-weights_manifest.json', remote: '::1' }
|
||||||
|
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 429708, url: '/model/age_gender_model.bin', remote: '::1' }
|
||||||
|
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 8485, url: '/model/face_landmark_68_model-weights_manifest.json', remote: '::1' }
|
||||||
|
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 356840, url: '/model/face_landmark_68_model.bin', remote: '::1' }
|
||||||
|
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 19615, url: '/model/face_recognition_model-weights_manifest.json', remote: '::1' }
|
||||||
|
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 6444032, url: '/model/face_recognition_model.bin', remote: '::1' }
|
||||||
|
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 6980, url: '/model/face_expression_model-weights_manifest.json', remote: '::1' }
|
||||||
|
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 329468, url: '/model/face_expression_model.bin', remote: '::1' }
|
||||||
|
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'image/jpeg', size: 144516, url: '/sample1.jpg', remote: '::1' }
|
||||||
```
|
```
|
||||||
|
|
||||||
<br>
|
<br><hr><br>
|
||||||
<hr>
|
|
||||||
<br>
|
|
||||||
|
|
||||||
## Build

@@ -366,58 +400,120 @@ cd face-api

Then install all dependencies and run rebuild:

```shell
npm install --production=false
npm run build
```

Build process uses the `@vladmandic/build` module that creates an optimized build for each target:

```js
> @vladmandic/face-api@1.7.1 build /home/vlado/dev/face-api
> node build.js

2022-07-25 08:21:05 INFO: Application: { name: '@vladmandic/face-api', version: '1.7.1' }
2022-07-25 08:21:05 INFO: Environment: { profile: 'production', config: '.build.json', package: 'package.json', tsconfig: true, eslintrc: true, git: true }
2022-07-25 08:21:05 INFO: Toolchain: { build: '0.7.7', esbuild: '0.14.50', typescript: '4.7.4', typedoc: '0.23.9', eslint: '8.20.0' }
2022-07-25 08:21:05 INFO: Build: { profile: 'production', steps: [ 'clean', 'compile', 'typings', 'typedoc', 'lint', 'changelog' ] }
2022-07-25 08:21:05 STATE: Clean: { locations: [ 'dist/*', 'typedoc/*', 'types/lib/src' ] }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/cpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 143, outputBytes: 614 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/cpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node.js', files: 162, inputBytes: 234137, outputBytes: 85701 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/gpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-gpu.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 147, outputBytes: 618 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/gpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-gpu.js', files: 162, inputBytes: 234141, outputBytes: 85705 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/wasm', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-wasm.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 185, outputBytes: 670 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/wasm', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-wasm.js', files: 162, inputBytes: 234193, outputBytes: 85755 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/tf-version', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-version.ts', output: 'dist/tfjs.version.js', files: 1, inputBytes: 1063, outputBytes: 400 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 2, inputBytes: 910, outputBytes: 527 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm-nobundle.js', files: 162, inputBytes: 234050, outputBytes: 82787 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 11, inputBytes: 910, outputBytes: 1184871 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/iife/bundle', format: 'iife', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.js', files: 162, inputBytes: 1418394, outputBytes: 1264631 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm.js', files: 162, inputBytes: 1418394, outputBytes: 1264150 }
2022-07-25 08:21:07 STATE: Typings: { input: 'src/index.ts', output: 'types/lib', files: 93 }
2022-07-25 08:21:09 STATE: TypeDoc: { input: 'src/index.ts', output: 'typedoc', objects: 154, generated: true }
2022-07-25 08:21:13 STATE: Lint: { locations: [ 'src/' ], files: 174, errors: 0, warnings: 0 }
2022-07-25 08:21:14 STATE: ChangeLog: { repository: 'https://github.com/vladmandic/face-api', branch: 'master', output: 'CHANGELOG.md' }
2022-07-25 08:21:14 INFO: Done...
2022-07-25 08:21:14 STATE: Copy: { input: 'types/lib/dist/tfjs.esm.d.ts' }
2022-07-25 08:21:15 STATE: API-Extractor: { succeeded: true, errors: 0, warnings: 417 }
2022-07-25 08:21:15 INFO: FaceAPI Build complete...
```
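
Each target listed in the log above is emitted as its own bundle under `dist/`. As a rough sketch of how a NodeJS app would consume one of them, loading a bundle directly by its output path (paths taken from the build log above; the `version` check assumes the export described in the notes further below):

```js
// NodeJS CPU target, built above as dist/face-api.node.js
const faceapi = require('./dist/face-api.node.js');
// gpu and wasm variants were built as dist/face-api.node-gpu.js and dist/face-api.node-wasm.js
console.log(faceapi.version); // quick check that the bundle loaded
```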

<br><hr><br>
## Face Mesh

`FaceAPI` landmark model returns a 68-point face mesh as detailed in the image below:

![facemesh](demo/facemesh.png)
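
For orientation, here is a minimal sketch of reading those 68 points from a detection result inside an async context; it uses the `landmarks.positions` array and the predefined contour getters covered later in this document:

```js
const result = await faceapi.detectSingleFace(input).withFaceLandmarks();
if (result) {
  const points = result.landmarks.positions; // array of the 68 landmark points shown above
  console.log(points.length); // 68
  const rightEyeBrow = result.landmarks.getRightEyeBrow(); // one of the predefined contour getters
}
```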

<br><hr><br>

## Note

This is an updated **face-api.js** with the latest available TensorFlow/JS, as the original is not compatible with **tfjs >=2.0**.
Forked from [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2**, which was released on March 22nd, 2020

*Why?* I needed a FaceAPI that does not cause version conflicts with newer versions of TensorFlow
And since the original FaceAPI was open-source, I've released this version as well

The changes ended up being too large for a simple pull request, so this became a full-fledged version of its own
Plus, many features have been added since the original inception

Although a lot of work has gone into this version of `FaceAPI` and it will continue to be maintained,
at this time it is completely superseded by my newer library `Human`, which covers the same use cases,
but extends them with newer AI models, additional detection details, compatibility with the latest web standards and more

- [Human NPM](https://www.npmjs.com/package/@vladmandic/human)
- [Human Git Repository](https://github.com/vladmandic/human)

<br>

## Differences

Compared to [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2**:

- Compatible with `TensorFlow/JS 2.0+, 3.0+ and 4.0+`
  Currently using **`TensorFlow/JS` 4.16**
  Original `face-api.js` is based on `TFJS` **1.7.4**
- Compatible with `WebGL`, `CPU` and `WASM` TFJS browser backends
- Compatible with both `tfjs-node` and `tfjs-node-gpu` TFJS NodeJS backends
- Updated all type castings for TypeScript type checking to `TypeScript 5.3`
- Switched bundling from `UMD` to `ESM` + `CommonJS` with fallback to `IIFE`
  Resulting code is optimized per-platform instead of being universal
  Fully tree shakable when imported as an `ESM` module
  Browser bundle process uses `ESBuild` instead of `Rollup`
- Added separate `face-api` versions with `tfjs` pre-bundled and without `tfjs`
  When using the `-nobundle` version, you can load any version of `tfjs` manually (see the sketch after this list)
- TypeScript build process now targets `ES2018` instead of dual `ES5`/`ES6`
  Resulting code is clean ES2018 JavaScript without polyfills
- Removed old tests, docs and examples
- Removed old package dependencies (`karma`, `jasmine`, `babel`, etc.)
- Updated all package dependencies
- Updated TensorFlow/JS dependencies since backends were removed from `@tensorflow/tfjs-core`
- Updated `mobileNetv1` model due to `batchNorm()` dependency
- Added `version` class that returns a JSON object with the version of FaceAPI as well as the linked TFJS
- Added test/dev built-in HTTP & HTTPS web server
- Removed `mtcnn` and `tinyYolov2` models as they were non-functional in the latest public version of `FaceAPI`
  Which means the valid models are **tinyFaceDetector** and **mobileNetv1**
  *If there is demand, I can re-implement them.*
- Added `face angle` calculations that return `roll`, `yaw` and `pitch` (see the sketch after this list)
- Added `typedoc` automatic API specification generation during build
- Added `changelog` automatic generation during build
- New process to generate **TypeDocs** bundle using API-Extractor
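
A quick sketch tying a few of these additions together; the `ESM` import and the `version` export are described above, while the exact name of the face-angle property on a result object is an assumption here, shown as `angle` for illustration:

```js
import * as faceapi from '@vladmandic/face-api'; // tfjs pre-bundled ESM build; the -nobundle variant lets you load your own tfjs first

console.log(faceapi.version); // version of FaceAPI as well as the linked TFJS

const result = await faceapi.detectSingleFace(input).withFaceLandmarks();
if (result) console.log(result.angle); // assumed property name: { roll, yaw, pitch } as calculated by this fork
```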
<br>
## Credits

- Original project: [face-api.js](https://github.com/justadudewhohacks/face-api.js)
- Original model weights: [face-api.js-models](https://github.com/justadudewhohacks/face-api.js-models)
- ML API Documentation: [TensorFlow/JS](https://js.tensorflow.org/api/latest/)

<br>

@@ -0,0 +1,5 @@

# Security Policy

All issues are tracked publicly on GitHub

Entire code base and included dependencies are automatically scanned against known security vulnerabilities
TUTORIAL.md

@@ -1,4 +1,4 @@

# FaceAPI Tutorial

## Features

@@ -38,7 +38,7 @@

All global neural network instances are exported via faceapi.nets:

```js
console.log(faceapi.nets)
// ageGenderNet
// faceExpressionNet
// ...
```

@@ -54,7 +54,7 @@ To load a model, you have to provide the corresponding manifest.json file as well…

Assuming the models reside in **public/models**:

```js
await faceapi.nets.ssdMobilenetv1.loadFromUri('/models')
// accordingly for the other models:
// await faceapi.nets.faceLandmark68Net.loadFromUri('/models')
// ...
```

@@ -64,26 +64,26 @@ await faceapi.nets.ssdMobilenetv1.loadFromUri('/models')

In a nodejs environment you can furthermore load the models directly from disk:

```js
await faceapi.nets.ssdMobilenetv1.loadFromDisk('./models')
```

You can also load the model from a tf.NamedTensorMap:

```js
await faceapi.nets.ssdMobilenetv1.loadFromWeightMap(weightMap)
```

Alternatively, you can also create your own instances of the neural nets:

```js
const net = new faceapi.SsdMobilenetv1()
await net.loadFromUri('/models')
```

You can also load the weights as a Float32Array (in case you want to use the uncompressed models):

```js
// using fetch
net.load(await faceapi.fetchNetWeights('/models/face_detection_model.weights'))
// ...
```

@@ -105,7 +105,7 @@ In the following **input** can be an HTML img, video or canvas element or the id…

```html
<!-- ... -->
<canvas id="myCanvas" />
```

```js
const input = document.getElementById('myImg')
// const input = document.getElementById('myVideo')
// const input = document.getElementById('myCanvas')
// ...
```

@@ -117,19 +117,19 @@ const input = document.getElementById('myImg')

Detect all faces in an image. Returns **Array<[FaceDetection](#interface-face-detection)>**:

```js
const detections = await faceapi.detectAllFaces(input)
```

Detect the face with the highest confidence score in an image. Returns **[FaceDetection](#interface-face-detection) | undefined**:

```js
const detection = await faceapi.detectSingleFace(input)
```

By default **detectAllFaces** and **detectSingleFace** utilize the SSD Mobilenet V1 Face Detector. You can specify the face detector by passing the corresponding options object:

```js
const detections1 = await faceapi.detectAllFaces(input, new faceapi.SsdMobilenetv1Options())
const detections2 = await faceapi.detectAllFaces(input, new faceapi.TinyFaceDetectorOptions())
```

@@ -142,19 +142,19 @@ You can tune the options of each face detector as shown [here](#getting-started-…

Detect all faces in an image + compute 68 Point Face Landmarks for each detected face. Returns **Array<[WithFaceLandmarks<WithFaceDetection<{}>>](#getting-started-utility-classes)>**:

```js
const detectionsWithLandmarks = await faceapi.detectAllFaces(input).withFaceLandmarks()
```

Detect the face with the highest confidence score in an image + compute 68 Point Face Landmarks for that face. Returns **[WithFaceLandmarks<WithFaceDetection<{}>>](#getting-started-utility-classes) | undefined**:

```js
const detectionWithLandmarks = await faceapi.detectSingleFace(input).withFaceLandmarks()
```

You can also specify to use the tiny model instead of the default model:

```js
const useTinyModel = true
const detectionsWithLandmarks = await faceapi.detectAllFaces(input).withFaceLandmarks(useTinyModel)
```

@@ -165,13 +165,13 @@ const detectionsWithLandmarks = await faceapi.detectAllFaces(input).withFaceLand…

Detect all faces in an image + compute 68 Point Face Landmarks and face descriptor for each detected face. Returns **Array<[WithFaceDescriptor<WithFaceLandmarks<WithFaceDetection<{}>>>](#getting-started-utility-classes)>**:

```js
const results = await faceapi.detectAllFaces(input).withFaceLandmarks().withFaceDescriptors()
```

Detect the face with the highest confidence score in an image + compute 68 Point Face Landmarks and face descriptor for that face. Returns **[WithFaceDescriptor<WithFaceLandmarks<WithFaceDetection<{}>>>](#getting-started-utility-classes) | undefined**:

```js
const result = await faceapi.detectSingleFace(input).withFaceLandmarks().withFaceDescriptor()
```

@@ -181,13 +181,13 @@ const result = await faceapi.detectSingleFace(input).withFaceLandmarks().withFac…

Detect all faces in an image + recognize face expressions of each face. Returns **Array<[WithFaceExpressions<WithFaceLandmarks<WithFaceDetection<{}>>>](#getting-started-utility-classes)>**:

```js
const detectionsWithExpressions = await faceapi.detectAllFaces(input).withFaceLandmarks().withFaceExpressions()
```

Detect the face with the highest confidence score in an image + recognize the face expressions for that face. Returns **[WithFaceExpressions<WithFaceLandmarks<WithFaceDetection<{}>>>](#getting-started-utility-classes) | undefined**:

```js
const detectionWithExpressions = await faceapi.detectSingleFace(input).withFaceLandmarks().withFaceExpressions()
```

@@ -195,13 +195,13 @@ const detectionWithExpressions = await faceapi.detectSingleFace(input).withFaceL…

Detect all faces without face alignment + recognize face expressions of each face. Returns **Array<[WithFaceExpressions<WithFaceDetection<{}>>](#getting-started-utility-classes)>**:

```js
const detectionsWithExpressions = await faceapi.detectAllFaces(input).withFaceExpressions()
```

Detect the face with the highest confidence score without face alignment + recognize the face expression for that face. Returns **[WithFaceExpressions<WithFaceDetection<{}>>](#getting-started-utility-classes) | undefined**:

```js
const detectionWithExpressions = await faceapi.detectSingleFace(input).withFaceExpressions()
```

@@ -211,13 +211,13 @@ const detectionWithExpressions = await faceapi.detectSingleFace(input).withFaceE…

Detect all faces in an image + estimate age and recognize gender of each face. Returns **Array<[WithAge<WithGender<WithFaceLandmarks<WithFaceDetection<{}>>>>](#getting-started-utility-classes)>**:

```js
const detectionsWithAgeAndGender = await faceapi.detectAllFaces(input).withFaceLandmarks().withAgeAndGender()
```

Detect the face with the highest confidence score in an image + estimate age and recognize gender for that face. Returns **[WithAge<WithGender<WithFaceLandmarks<WithFaceDetection<{}>>>>](#getting-started-utility-classes) | undefined**:

```js
const detectionWithAgeAndGender = await faceapi.detectSingleFace(input).withFaceLandmarks().withAgeAndGender()
```

@@ -225,13 +225,13 @@ const detectionWithAgeAndGender = await faceapi.detectSingleFace(input).withFace…

Detect all faces without face alignment + estimate age and recognize gender of each face. Returns **Array<[WithAge<WithGender<WithFaceDetection<{}>>>](#getting-started-utility-classes)>**:

```js
const detectionsWithAgeAndGender = await faceapi.detectAllFaces(input).withAgeAndGender()
```

Detect the face with the highest confidence score without face alignment + estimate age and recognize gender for that face. Returns **[WithAge<WithGender<WithFaceDetection<{}>>>](#getting-started-utility-classes) | undefined**:

```js
const detectionWithAgeAndGender = await faceapi.detectSingleFace(input).withAgeAndGender()
```

@@ -239,7 +239,7 @@ const detectionWithAgeAndGender = await faceapi.detectSingleFace(input).withAgeA…

**Tasks can be composed as follows:**

```js
// all faces
await faceapi.detectAllFaces(input)
await faceapi.detectAllFaces(input).withFaceExpressions()
// ...
```

@@ -265,7 +265,7 @@ To perform face recognition, one can use faceapi.FaceMatcher to compare referenc…

First, we initialize the FaceMatcher with the reference data, for example we can simply detect faces in a **referenceImage** and match the descriptors of the detected faces to faces of subsequent images:

```js
const results = await faceapi
  .detectAllFaces(referenceImage)
  .withFaceLandmarks()
  // ...
```

@@ -282,7 +282,7 @@ const faceMatcher = new faceapi.FaceMatcher(results)

Now we can recognize a person's face shown in **queryImage1**:

```js
const singleResult = await faceapi
  .detectSingleFace(queryImage1)
  .withFaceLandmarks()
  // ...
```

@@ -296,7 +296,7 @@ if (singleResult) {

Or we can recognize all faces shown in **queryImage2**:

```js
const results = await faceapi
  .detectAllFaces(queryImage2)
  .withFaceLandmarks()
  // ...
```

@@ -310,7 +310,7 @@ results.forEach(fd => {

You can also create labeled reference descriptors as follows:

```js
const labeledDescriptors = [
  new faceapi.LabeledFaceDescriptors(
    'obama',
    // ...
```

@@ -331,7 +331,7 @@ const faceMatcher = new faceapi.FaceMatcher(labeledDescriptors)

Preparing the overlay canvas:

```js
const displaySize = { width: input.width, height: input.height }
// resize the overlay canvas to the input dimensions
const canvas = document.getElementById('overlay')
// ...
```

@@ -340,7 +340,7 @@ faceapi.matchDimensions(canvas, displaySize)

face-api.js predefines some high-level drawing functions, which you can utilize:

```js
/* Display detected face bounding boxes */
const detections = await faceapi.detectAllFaces(input)
// resize the detected boxes in case your displayed image has a different size than the original
// ...
```

@@ -376,7 +376,7 @@ faceapi.draw.drawFaceExpressions(canvas, resizedResults, minProbability)

You can also draw boxes with custom text ([DrawBox](https://github.com/justadudewhohacks/tfjs-image-recognition-base/blob/master/src/draw/DrawBox.ts)):

```js
const box = { x: 50, y: 50, width: 100, height: 100 }
// see DrawBoxOptions below
const drawOptions = {
  // ...
```

@@ -389,7 +389,7 @@ drawBox.draw(document.getElementById('myCanvas'))

DrawBox drawing options:

```js
export interface IDrawBoxOptions {
  boxColor?: string
  lineWidth?: number
  // ...
```

@@ -400,7 +400,7 @@ export interface IDrawBoxOptions {

Finally you can draw custom text fields ([DrawTextField](https://github.com/justadudewhohacks/tfjs-image-recognition-base/blob/master/src/draw/DrawTextField.ts)):

```js
const text = [
  'This is a textline!',
  'This is another textline!'
  // ...
```

@@ -417,7 +417,7 @@ drawBox.draw(document.getElementById('myCanvas'))

DrawTextField drawing options:

```js
export interface IDrawTextFieldOptions {
  anchorPosition?: AnchorPosition
  backgroundColor?: string
  // ...
```

@@ -441,7 +441,7 @@ export enum AnchorPosition {

#### SsdMobilenetv1Options

```js
export interface ISsdMobilenetv1Options {
  // minimum confidence threshold
  // default: 0.5
  // ...
```

@@ -458,7 +458,7 @@ const options = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.8 })

#### TinyFaceDetectorOptions

```js
export interface ITinyFaceDetectorOptions {
  // size at which image is processed, the smaller the faster,
  // but less precise in detecting smaller faces, must be divisible
  // ...
```

@@ -483,7 +483,7 @@ const options = new faceapi.TinyFaceDetectorOptions({ inputSize: 320 })

#### IBox

```js
export interface IBox {
  x: number
  y: number
  // ...
```

@@ -494,7 +494,7 @@ export interface IBox {

#### IFaceDetection

```js
export interface IFaceDetection {
  score: number
  box: Box
}
```

@@ -503,7 +503,7 @@ export interface IFaceDetection {

#### IFaceLandmarks

```js
export interface IFaceLandmarks {
  positions: Point[]
  shift: Point
}
```

@@ -512,7 +512,7 @@ export interface IFaceLandmarks {

#### WithFaceDetection

```js
export type WithFaceDetection<TSource> = TSource & {
  detection: FaceDetection
}
```

@@ -520,7 +520,7 @@ export type WithFaceDetection<TSource> = TSource & {

#### WithFaceLandmarks

```js
export type WithFaceLandmarks<TSource> = TSource & {
  unshiftedLandmarks: FaceLandmarks
  landmarks: FaceLandmarks
  // ...
```

@@ -533,7 +533,7 @@ export type WithFaceLandmarks<TSource> = TSource & {

#### WithFaceDescriptor

```js
export type WithFaceDescriptor<TSource> = TSource & {
  descriptor: Float32Array
}
```

@@ -541,7 +541,7 @@ export type WithFaceDescriptor<TSource> = TSource & {

#### WithFaceExpressions

```js
export type WithFaceExpressions<TSource> = TSource & {
  expressions: FaceExpressions
}
```

@@ -549,7 +549,7 @@ export type WithFaceExpressions<TSource> = TSource & {

#### WithAge

```js
export type WithAge<TSource> = TSource & {
  age: number
}
```

@@ -557,7 +557,7 @@ export type WithAge<TSource> = TSource & {

#### WithGender

```js
export type WithGender<TSource> = TSource & {
  gender: Gender
  genderProbability: number
}
```

@@ -577,7 +577,7 @@ export enum Gender {

Instead of using the high level API, you can directly use the forward methods of each neural network:

```js
const detections1 = await faceapi.ssdMobilenetv1(input, options)
const detections2 = await faceapi.tinyFaceDetector(input, options)
const landmarks1 = await faceapi.detectFaceLandmarks(faceImage)
// ...
```

@@ -587,7 +587,7 @@ const descriptor = await faceapi.computeFaceDescriptor(alignedFaceImage)

#### Extracting a Canvas for an Image Region

```js
const regionsToExtract = [
  new faceapi.Rect(0, 0, 100, 100)
]
// ...
```

@@ -598,7 +598,7 @@ const canvases = await faceapi.extractFaces(input, regionsToExtract)

#### Euclidean Distance

```js
// meant to be used for computing the euclidean distance between two face descriptors
const dist = faceapi.euclideanDistance([0, 0], [0, 10])
console.log(dist) // 10
```

@@ -606,7 +606,7 @@ console.log(dist) // 10

#### Retrieve the Face Landmark Points and Contours

```js
const landmarkPositions = landmarks.positions

// or get the positions of individual contours,
// ...
```

@@ -626,7 +626,7 @@ const rightEyeBrow = landmarks.getRightEyeBrow()

```html
<!-- ... -->
<img id="myImg" src="">
```

```js
const image = await faceapi.fetchImage('/images/example.png')

console.log(image instanceof HTMLImageElement) // true
// ...
```

@@ -638,7 +638,7 @@ myImg.src = image.src

#### Fetching JSON

```js
const json = await faceapi.fetchJson('/files/example.json')
```

@@ -649,7 +649,7 @@ const json = await faceapi.fetchJson('/files/example.json')

```html
<!-- ... -->
<input id="myFileUpload" type="file" onchange="uploadImage()" accept=".jpg, .jpeg, .png">
```

```js
async function uploadImage() {
  const imgFile = document.getElementById('myFileUpload').files[0]
  // create an HTMLImageElement from a Blob
  // ...
}
```

@@ -665,7 +665,7 @@ async function uploadImage() {

```html
<!-- ... -->
<video id="myVideo" src="media/example.mp4" />
```

```js
const canvas1 = faceapi.createCanvasFromMedia(document.getElementById('myImg'))
const canvas2 = faceapi.createCanvasFromMedia(document.getElementById('myVideo'))
```
@@ -0,0 +1,38 @@

API-Extractor configuration (loaded as `api-extractor.json` by the build script below):

```json
{
  "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json",
  "mainEntryPointFilePath": "types/lib/src/index.d.ts",
  "bundledPackages": ["@tensorflow/tfjs-core", "@tensorflow/tfjs-converter", "@types/offscreencanvas"],
  "compiler": {
    "skipLibCheck": false
  },
  "newlineKind": "lf",
  "dtsRollup": {
    "enabled": true,
    "untrimmedFilePath": "types/face-api.d.ts"
  },
  "docModel": { "enabled": false },
  "tsdocMetadata": {
    "enabled": false
  },
  "apiReport": { "enabled": false },
  "messages": {
    "compilerMessageReporting": {
      "default": {
        "logLevel": "warning"
      }
    },
    "extractorMessageReporting": {
      "default": {
        "logLevel": "warning"
      },
      "ae-missing-release-tag": {
        "logLevel": "none"
      }
    },
    "tsdocMessageReporting": {
      "default": {
        "logLevel": "warning"
      }
    }
  }
}
```
@@ -0,0 +1,77 @@

Custom build script (invoked as `node build.js`, as seen in the build log above):

```js
const fs = require('fs');
const log = require('@vladmandic/pilogger');
const Build = require('@vladmandic/build').Build;
const APIExtractor = require('@microsoft/api-extractor');

const regEx = [
  { search: 'types="@webgpu/types/dist"', replace: 'path="../src/types/webgpu.d.ts"' },
  { search: 'types="offscreencanvas"', replace: 'path="../src/types/offscreencanvas.d.ts"' },
];

function copyFile(src, dst) {
  if (!fs.existsSync(src)) {
    log.warn('Copy:', { input: src, output: dst });
    return;
  }
  log.state('Copy:', { input: src, output: dst });
  const buffer = fs.readFileSync(src);
  fs.writeFileSync(dst, buffer);
}

function writeFile(str, dst) {
  log.state('Write:', { output: dst });
  fs.writeFileSync(dst, str);
}

function regExFile(src, entries) {
  if (!fs.existsSync(src)) {
    log.warn('Filter:', { src });
    return;
  }
  log.state('Filter:', { input: src });
  for (const entry of entries) {
    const buffer = fs.readFileSync(src, 'UTF-8');
    const lines = buffer.split(/\r?\n/);
    const out = [];
    for (const line of lines) {
      if (line.includes(entry.search)) out.push(line.replace(entry.search, entry.replace));
      else out.push(line);
    }
    fs.writeFileSync(src, out.join('\n'));
  }
}

const apiIgnoreList = ['ae-forgotten-export', 'ae-unresolved-link', 'tsdoc-param-tag-missing-hyphen'];

async function main() {
  // run production build
  const build = new Build();
  await build.run('production');
  // patch tfjs typedefs
  log.state('Copy:', { input: 'types/lib/dist/tfjs.esm.d.ts' });
  copyFile('types/lib/dist/tfjs.esm.d.ts', 'dist/tfjs.esm.d.ts');
  // run api-extractor to create typedef rollup
  const extractorConfig = APIExtractor.ExtractorConfig.loadFileAndPrepare('api-extractor.json');
  const extractorResult = APIExtractor.Extractor.invoke(extractorConfig, {
    localBuild: true,
    showVerboseMessages: false,
    messageCallback: (msg) => {
      msg.handled = true;
      if (msg.logLevel === 'none' || msg.logLevel === 'verbose' || msg.logLevel === 'info') return;
      if (msg.sourceFilePath?.includes('/node_modules/')) return;
      if (apiIgnoreList.reduce((prev, curr) => prev || msg.messageId.includes(curr), false)) return;
      log.data('API', { level: msg.logLevel, category: msg.category, id: msg.messageId, file: msg.sourceFilePath, line: msg.sourceFileLine, text: msg.text });
    },
  });
  log.state('API-Extractor:', { succeeded: extractorResult.succeeded, errors: extractorResult.errorCount, warnings: extractorResult.warningCount });
  regExFile('types/face-api.d.ts', regEx);
  writeFile('export * from \'../types/face-api\';', 'dist/face-api.esm-nobundle.d.ts');
  writeFile('export * from \'../types/face-api\';', 'dist/face-api.esm.d.ts');
  writeFile('export * from \'../types/face-api\';', 'dist/face-api.d.ts');
  writeFile('export * from \'../types/face-api\';', 'dist/face-api.node.d.ts');
  writeFile('export * from \'../types/face-api\';', 'dist/face-api.node-gpu.d.ts');
  writeFile('export * from \'../types/face-api\';', 'dist/face-api.node-wasm.d.ts');
  log.info('FaceAPI Build complete...');
}

main();
```
@@ -0,0 +1,17 @@

Demo page (`index.html`, loads the demo script below):

```html
<!DOCTYPE html>
<html lang="en">
<head>
  <title>FaceAPI Static Images Demo</title>
  <meta http-equiv="content-type" content="text/html; charset=utf-8">
  <meta name="viewport" content="width=device-width, shrink-to-fit=yes">
  <meta name="application-name" content="FaceAPI">
  <meta name="keywords" content="FaceAPI">
  <meta name="description" content="FaceAPI: AI-powered Face Detection, Description & Recognition for Browser and NodeJS using Tensorflow/JS; Author: Vladimir Mandic <https://github.com/vladmandic>">
  <meta name="msapplication-tooltip" content="FaceAPI: AI-powered Face Detection, Description & Recognition for Browser and NodeJS using Tensorflow/JS; Author: Vladimir Mandic <https://github.com/vladmandic>">
  <link rel="shortcut icon" href="../favicon.ico" type="image/x-icon">
  <script src="./index.js" type="module"></script>
</head>
<body style="font-family: monospace; background: black; color: white; font-size: 16px; line-height: 22px; margin: 0; overflow-x: hidden;">
  <div id="log"></div>
</body>
</html>
```
@ -1,27 +1,27 @@
|
||||||
import * as faceapi from '../dist/face-api.esm.js';
|
/**
|
||||||
|
* FaceAPI Demo for Browsers
|
||||||
|
* Loaded via `index.html`
|
||||||
|
*/
|
||||||
|
|
||||||
|
import * as faceapi from '../dist/face-api.esm.js'; // use when in dev mode
|
||||||
|
// import * as faceapi from '@vladmandic/face-api'; // use when downloading face-api as npm
|
||||||
|
|
||||||
// configuration options
|
// configuration options
|
||||||
const modelPath = 'https://vladmandic.github.io/face-api/model/'; // path to model folder that will be loaded using http
|
const modelPath = '../model/'; // path to model folder that will be loaded using http
|
||||||
// const modelPath = '../model/'; // path to model folder that will be loaded using http
|
// const modelPath = 'https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model/'; // path to model folder that will be loaded using http
|
||||||
const imgSize = 800; // maximum image size in pixels
|
const imgSize = 800; // maximum image size in pixels
|
||||||
const minScore = 0.3; // minimum score
|
const minScore = 0.3; // minimum score
|
||||||
const maxResults = 10; // maximum number of results to return
|
const maxResults = 10; // maximum number of results to return
|
||||||
const samples = ['sample (1).jpg', 'sample (2).jpg', 'sample (3).jpg', 'sample (4).jpg', 'sample (5).jpg', 'sample (6).jpg']; // sample images to be loaded using http
|
const samples = ['sample1.jpg', 'sample2.jpg', 'sample3.jpg', 'sample4.jpg', 'sample5.jpg', 'sample6.jpg']; // sample images to be loaded using http
|
||||||
|
|
||||||
// helper function to pretty-print json object to string
|
// helper function to pretty-print json object to string
|
||||||
function str(json) {
|
const str = (json) => (json ? JSON.stringify(json).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ') : '');
|
||||||
let text = '<font color="lightblue">';
|
|
||||||
text += json ? JSON.stringify(json).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ') : '';
|
|
||||||
text += '</font>';
|
|
||||||
return text;
|
|
||||||
}
|
|
||||||
|
|
||||||
// helper function to print strings to html document as a log
|
// helper function to print strings to html document as a log
|
||||||
function log(...txt) {
|
function log(...txt) {
|
||||||
// eslint-disable-next-line no-console
|
console.log(...txt); // eslint-disable-line no-console
|
||||||
console.log(...txt);
|
const div = document.getElementById('log');
|
||||||
// @ts-ignore
|
if (div) div.innerHTML += `<br>${txt}`;
|
||||||
document.getElementById('log').innerHTML += `<br>${txt}`;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// helper function to draw detected faces
|
// helper function to draw detected faces
|
||||||
|
@ -33,11 +33,9 @@ function faces(name, title, id, data) {
|
||||||
canvas.style.position = 'absolute';
|
canvas.style.position = 'absolute';
|
||||||
canvas.style.left = `${img.offsetLeft}px`;
|
canvas.style.left = `${img.offsetLeft}px`;
|
||||||
canvas.style.top = `${img.offsetTop}px`;
|
canvas.style.top = `${img.offsetTop}px`;
|
||||||
// @ts-ignore
|
|
||||||
canvas.width = img.width;
|
canvas.width = img.width;
|
||||||
// @ts-ignore
|
|
||||||
canvas.height = img.height;
|
canvas.height = img.height;
|
||||||
const ctx = canvas.getContext('2d');
|
const ctx = canvas.getContext('2d', { willReadFrequently: true });
|
||||||
if (!ctx) return;
|
if (!ctx) return;
|
||||||
// draw title
|
// draw title
|
||||||
ctx.font = '1rem sans-serif';
|
ctx.font = '1rem sans-serif';
|
||||||
|
@ -53,6 +51,7 @@ function faces(name, title, id, data) {
|
||||||
ctx.beginPath();
|
ctx.beginPath();
|
||||||
ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height);
|
ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height);
|
||||||
ctx.stroke();
|
ctx.stroke();
|
||||||
|
// draw text labels
|
||||||
ctx.globalAlpha = 1;
|
ctx.globalAlpha = 1;
|
||||||
ctx.fillText(`${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 18);
|
ctx.fillText(`${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 18);
|
||||||
ctx.fillText(`${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 2);
|
ctx.fillText(`${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 2);
|
||||||
|
@@ -72,8 +71,7 @@ function faces(name, title, id, data) {
|
||||||
|
|
||||||
// helper function to draw processed image and its results
|
// helper function to draw processed image and its results
|
||||||
function print(title, img, data) {
|
function print(title, img, data) {
|
||||||
// eslint-disable-next-line no-console
|
console.log('Results:', title, img, data); // eslint-disable-line no-console
|
||||||
console.log('Results:', title, img, data);
|
|
||||||
const el = new Image();
|
const el = new Image();
|
||||||
el.id = Math.floor(Math.random() * 100000).toString();
|
el.id = Math.floor(Math.random() * 100000).toString();
|
||||||
el.src = img;
|
el.src = img;
|
||||||
|
@@ -96,7 +94,7 @@ async function image(url) {
|
||||||
const canvas = document.createElement('canvas');
|
const canvas = document.createElement('canvas');
|
||||||
canvas.height = img.height;
|
canvas.height = img.height;
|
||||||
canvas.width = img.width;
|
canvas.width = img.width;
|
||||||
const ctx = canvas.getContext('2d');
|
const ctx = canvas.getContext('2d', { willReadFrequently: true });
|
||||||
if (ctx) ctx.drawImage(img, 0, 0, img.width, img.height);
|
if (ctx) ctx.drawImage(img, 0, 0, img.width, img.height);
|
||||||
// return generated canvas to be used by tfjs during detection
|
// return generated canvas to be used by tfjs during detection
|
||||||
resolve(canvas);
|
resolve(canvas);
|
||||||
|
@@ -111,18 +109,23 @@ async function main() {
|
||||||
log('FaceAPI Test');
|
log('FaceAPI Test');
|
||||||
|
|
||||||
// if you want to use wasm backend location for wasm binaries must be specified
|
// if you want to use wasm backend location for wasm binaries must be specified
|
||||||
// await faceapi.tf.setWasmPaths('../node_modules/@tensorflow/tfjs-backend-wasm/dist/');
|
// await faceapi.tf?.setWasmPaths(`https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${faceapi.tf.version_core}/dist/`);
|
||||||
// await faceapi.tf.setBackend('wasm');
|
// await faceapi.tf?.setBackend('wasm');
|
||||||
|
// log(`WASM SIMD: ${await faceapi.tf?.env().getAsync('WASM_HAS_SIMD_SUPPORT')} Threads: ${await faceapi.tf?.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT') ? 'Multi' : 'Single'}`);
|
||||||
|
|
||||||
// default is webgl backend
|
// default is webgl backend
|
||||||
await faceapi.tf.setBackend('webgl');
|
await faceapi.tf.setBackend('webgl');
|
||||||
|
await faceapi.tf.ready();
|
||||||
|
|
||||||
|
// tfjs optimizations
|
||||||
|
if (faceapi.tf?.env().flagRegistry.CANVAS2D_WILL_READ_FREQUENTLY) faceapi.tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
|
||||||
|
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
|
||||||
await faceapi.tf.enableProdMode();
|
await faceapi.tf.enableProdMode();
|
||||||
await faceapi.tf.ENV.set('DEBUG', false);
|
|
||||||
await faceapi.tf.ready();
|
await faceapi.tf.ready();
|
||||||
|
|
||||||
// check version
|
// check version
|
||||||
log(`Version: FaceAPI ${str(faceapi?.version.faceapi || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`);
|
log(`Version: FaceAPI ${str(faceapi?.version || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`);
|
||||||
log(`Flags: ${JSON.stringify(faceapi?.tf?.ENV.flags || { tf: 'not loaded' })}`);
|
log(`Flags: ${JSON.stringify(faceapi?.tf?.ENV.flags || { tf: 'not loaded' })}`);
|
||||||
|
|
||||||
// load face-api models
|
// load face-api models
|
||||||
|
@@ -140,16 +143,9 @@ async function main() {
|
||||||
const engine = await faceapi.tf.engine();
|
const engine = await faceapi.tf.engine();
|
||||||
log(`TF Engine State: ${str(engine.state)}`);
|
log(`TF Engine State: ${str(engine.state)}`);
|
||||||
|
|
||||||
// const testT = faceapi.tf.tensor([0]);
|
|
||||||
// const testF = testT.toFloat();
|
|
||||||
// console.log(testT.print(), testF.print());
|
|
||||||
// testT.dispose();
|
|
||||||
// testF.dispose();
|
|
||||||
|
|
||||||
// loop through all images and try to process them
|
// loop through all images and try to process them
|
||||||
log(`Start processing: ${samples.length} images ...<br>`);
|
log(`Start processing: ${samples.length} images ...<br>`);
|
||||||
for (const img of samples) {
|
for (const img of samples) {
|
||||||
// new line
|
|
||||||
document.body.appendChild(document.createElement('br'));
|
document.body.appendChild(document.createElement('br'));
|
||||||
// load and resize image
|
// load and resize image
|
||||||
const canvas = await image(img);
|
const canvas = await image(img);
|
||||||
|
@@ -163,7 +159,7 @@ async function main() {
|
||||||
.withFaceDescriptors()
|
.withFaceDescriptors()
|
||||||
.withAgeAndGender();
|
.withAgeAndGender();
|
||||||
// print results to screen
|
// print results to screen
|
||||||
print('TinyFace Detector', img, dataTinyYolo);
|
print('TinyFace:', img, dataTinyYolo);
|
||||||
// actual model execution
|
// actual model execution
|
||||||
const dataSSDMobileNet = await faceapi
|
const dataSSDMobileNet = await faceapi
|
||||||
.detectAllFaces(canvas, optionsSSDMobileNet)
|
.detectAllFaces(canvas, optionsSSDMobileNet)
|
||||||
|
@@ -172,11 +168,9 @@ async function main() {
|
||||||
.withFaceDescriptors()
|
.withFaceDescriptors()
|
||||||
.withAgeAndGender();
|
.withAgeAndGender();
|
||||||
// print results to screen
|
// print results to screen
|
||||||
print('SSD MobileNet', img, dataSSDMobileNet);
|
print('SSDMobileNet:', img, dataSSDMobileNet);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
log(`Image: ${img} Error during processing ${str(err)}`);
|
log(`Image: ${img} Error during processing ${str(err)}`);
|
||||||
// eslint-disable-next-line no-console
|
|
||||||
console.error(err);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
|
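The guarded flag writes in main() above follow a deliberate pattern: tf.env().set() throws for flag names the loaded tfjs build has not registered, so the demo checks flagRegistry first. A minimal reusable sketch of the same guard (illustrative helper, not part of the demo):

// set a tfjs environment flag only when the loaded tfjs build registers it;
// calling tf.env().set() with an unknown flag name throws
function setFlagIfAvailable(tf, flag, value) {
  if (tf.env().flagRegistry[flag]) tf.env().set(flag, value);
}
// usage, mirroring the demo:
// setFlagIfAvailable(faceapi.tf, 'CANVAS2D_WILL_READ_FREQUENTLY', true);
// setFlagIfAvailable(faceapi.tf, 'WEBGL_EXP_CONV', true);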
@@ -0,0 +1,98 @@
|
||||||
|
/**
|
||||||
|
* FaceAPI Demo for NodeJS
|
||||||
|
* - Uses external library [canvas](https://www.npmjs.com/package/canvas) to decode image
|
||||||
|
* - Loads image from provided param
|
||||||
|
* - Outputs results to console
|
||||||
|
*/
|
||||||
|
|
||||||
|
// canvas library provides full canvas (load/draw/write) functionality for nodejs
|
||||||
|
// must be installed manually as it is just a demo dependency and not an actual face-api dependency
|
||||||
|
const canvas = require('canvas'); // eslint-disable-line node/no-missing-require
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
const process = require('process');
|
||||||
|
const log = require('@vladmandic/pilogger');
|
||||||
|
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
|
||||||
|
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
|
||||||
|
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
|
||||||
|
|
||||||
|
const modelPathRoot = '../model';
|
||||||
|
const imgPathRoot = './demo'; // modify to include your sample images
|
||||||
|
const minConfidence = 0.15;
|
||||||
|
const maxResults = 5;
|
||||||
|
let optionsSSDMobileNet;
|
||||||
|
|
||||||
|
async function image(input) {
|
||||||
|
const img = await canvas.loadImage(input);
|
||||||
|
const c = canvas.createCanvas(img.width, img.height);
|
||||||
|
const ctx = c.getContext('2d');
|
||||||
|
ctx.drawImage(img, 0, 0, img.width, img.height);
|
||||||
|
// const out = fs.createWriteStream('test.jpg');
|
||||||
|
// const stream = c.createJPEGStream({ quality: 0.6, progressive: true, chromaSubsampling: true });
|
||||||
|
// stream.pipe(out);
|
||||||
|
return c;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function detect(tensor) {
|
||||||
|
const result = await faceapi
|
||||||
|
.detectAllFaces(tensor, optionsSSDMobileNet)
|
||||||
|
.withFaceLandmarks()
|
||||||
|
.withFaceExpressions()
|
||||||
|
.withFaceDescriptors()
|
||||||
|
.withAgeAndGender();
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
function print(face) {
|
||||||
|
const expression = Object.entries(face.expressions).reduce((acc, val) => ((val[1] > acc[1]) ? val : acc), ['', 0]);
|
||||||
|
const box = [face.alignedRect._box._x, face.alignedRect._box._y, face.alignedRect._box._width, face.alignedRect._box._height];
|
||||||
|
const gender = `Gender: ${Math.round(100 * face.genderProbability)}% ${face.gender}`;
|
||||||
|
log.data(`Detection confidence: ${Math.round(100 * face.detection._score)}% ${gender} Age: ${Math.round(10 * face.age) / 10} Expression: ${Math.round(100 * expression[1])}% ${expression[0]} Box: ${box.map((a) => Math.round(a))}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
log.header();
|
||||||
|
log.info('FaceAPI single-process test');
|
||||||
|
|
||||||
|
faceapi.env.monkeyPatch({ Canvas: canvas.Canvas, Image: canvas.Image, ImageData: canvas.ImageData });
|
||||||
|
|
||||||
|
await faceapi.tf.setBackend('tensorflow');
|
||||||
|
await faceapi.tf.ready();
|
||||||
|
|
||||||
|
log.state(`Version: FaceAPI ${faceapi.version} TensorFlow/JS ${tf.version_core} Backend: ${faceapi.tf?.getBackend()}`);
|
||||||
|
|
||||||
|
log.info('Loading FaceAPI models');
|
||||||
|
const modelPath = path.join(__dirname, modelPathRoot);
|
||||||
|
await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.ageGenderNet.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
|
||||||
|
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults });
|
||||||
|
|
||||||
|
if (process.argv.length !== 3) {
|
||||||
|
const t0 = process.hrtime.bigint();
|
||||||
|
const dir = fs.readdirSync(imgPathRoot);
|
||||||
|
let numImages = 0;
|
||||||
|
for (const img of dir) {
|
||||||
|
if (!img.toLocaleLowerCase().endsWith('.jpg')) continue;
|
||||||
|
numImages += 1;
|
||||||
|
const c = await image(path.join(imgPathRoot, img));
|
||||||
|
const result = await detect(c);
|
||||||
|
log.data('Image:', img, 'Detected faces:', result.length);
|
||||||
|
for (const face of result) print(face);
|
||||||
|
}
|
||||||
|
const t1 = process.hrtime.bigint();
|
||||||
|
log.info('Processed', numImages, 'images in', Math.trunc(Number((t1 - t0).toString()) / 1000 / 1000), 'ms');
|
||||||
|
} else {
|
||||||
|
const param = process.argv[2];
|
||||||
|
if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {
|
||||||
|
const c = await image(param);
|
||||||
|
const result = await detect(c);
|
||||||
|
log.data('Image:', param, 'Detected faces:', result.length);
|
||||||
|
for (const face of result) print(face);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
main();
|
|
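The reduce over face.expressions in print() above simply picks the highest-probability entry; the same selection written out explicitly (sample values assumed):

// face.expressions maps expression name to probability
const expressions = { neutral: 0.10, happy: 0.85, sad: 0.05 }; // sample values, not real output
// sort entries by probability, descending, and take the top pair
const [name, probability] = Object.entries(expressions).sort((a, b) => b[1] - a[1])[0];
console.log(name, probability); // happy 0.85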
@@ -0,0 +1,35 @@
|
||||||
|
/**
|
||||||
|
* FaceAPI demo that loads two images and finds the similarity between the most prominent face in each image
|
||||||
|
*/
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const tf = require('@tensorflow/tfjs-node');
|
||||||
|
const faceapi = require('../dist/face-api.node');
|
||||||
|
|
||||||
|
let optionsSSDMobileNet;
|
||||||
|
|
||||||
|
const getDescriptors = async (imageFile) => {
|
||||||
|
const buffer = fs.readFileSync(imageFile);
|
||||||
|
const tensor = tf.node.decodeImage(buffer, 3);
|
||||||
|
const faces = await faceapi.detectAllFaces(tensor, optionsSSDMobileNet)
|
||||||
|
.withFaceLandmarks()
|
||||||
|
.withFaceDescriptors();
|
||||||
|
tf.dispose(tensor);
|
||||||
|
return faces.map((face) => face.descriptor);
|
||||||
|
};
|
||||||
|
|
||||||
|
const main = async (file1, file2) => {
|
||||||
|
console.log('input images:', file1, file2); // eslint-disable-line no-console
|
||||||
|
await tf.ready();
|
||||||
|
await faceapi.nets.ssdMobilenetv1.loadFromDisk('model');
|
||||||
|
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.5, maxResults: 1 });
|
||||||
|
await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
|
||||||
|
await faceapi.nets.faceRecognitionNet.loadFromDisk('model');
|
||||||
|
const desc1 = await getDescriptors(file1);
|
||||||
|
const desc2 = await getDescriptors(file2);
|
||||||
|
const distance = faceapi.euclideanDistance(desc1[0], desc2[0]); // only compare first found face in each image
|
||||||
|
console.log('distance between most prominent detected faces:', distance); // eslint-disable-line no-console
|
||||||
|
console.log('similarity between most prominent detected faces:', 1 - distance); // eslint-disable-line no-console
|
||||||
|
};
|
||||||
|
|
||||||
|
main('demo/sample1.jpg', 'demo/sample2.jpg');
|
|
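The demo prints raw distance and 1 - distance as similarity; turning that into a yes/no answer needs a threshold. A minimal sketch, assuming the 128-element descriptors returned by .withFaceDescriptors() and a commonly used (but not mandated) cutoff of 0.6:

// assumes faceapi is already required as above;
// the 0.6 threshold is a rule of thumb, tune it for your data
function isSameFace(desc1, desc2, threshold = 0.6) {
  const distance = faceapi.euclideanDistance(desc1, desc2); // 0 = identical
  return { distance, similarity: 1 - distance, match: distance < threshold };
}
// e.g. isSameFace(desc1[0], desc2[0]) with the descriptors computed in main()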
@@ -0,0 +1,54 @@
|
||||||
|
/**
|
||||||
|
* FaceAPI Demo for NodeJS
|
||||||
|
* - Uses external library [@canvas/image](https://www.npmjs.com/package/@canvas/image) to decode image
|
||||||
|
* - Loads image from provided param
|
||||||
|
* - Outputs results to console
|
||||||
|
*/
|
||||||
|
|
||||||
|
// @canvas/image can decode jpeg, png, webp
|
||||||
|
// must be installed manually as it is just a demo dependency and not an actual face-api dependency
|
||||||
|
const image = require('@canvas/image'); // eslint-disable-line node/no-missing-require
|
||||||
|
const fs = require('fs');
|
||||||
|
const log = require('@vladmandic/pilogger');
|
||||||
|
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
|
||||||
|
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
|
||||||
|
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
|
||||||
|
|
||||||
|
const modelPath = 'model/';
|
||||||
|
const imageFile = 'demo/sample1.jpg';
|
||||||
|
const ssdOptions = { minConfidence: 0.1, maxResults: 10 };
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
log.header();
|
||||||
|
const buffer = fs.readFileSync(imageFile); // read image from disk
|
||||||
|
const canvas = await image.imageFromBuffer(buffer); // decode to canvas
|
||||||
|
const imageData = image.getImageData(canvas); // read decoded image data from canvas
|
||||||
|
log.info('image:', imageFile, canvas.width, canvas.height);
|
||||||
|
|
||||||
|
const tensor = tf.tidy(() => { // create tensor from image data
|
||||||
|
const data = tf.tensor(Array.from(imageData?.data || []), [canvas.height, canvas.width, 4], 'int32'); // create rgba image tensor from flat array and flip to height x width
|
||||||
|
const channels = tf.split(data, 4, 2); // split rgba to channels
|
||||||
|
const rgb = tf.stack([channels[0], channels[1], channels[2]], 2); // stack channels back to rgb
|
||||||
|
const reshape = tf.reshape(rgb, [1, canvas.height, canvas.width, 3]); // move extra dim from the end of tensor and use it as batch number instead
|
||||||
|
return reshape;
|
||||||
|
});
|
||||||
|
log.info('tensor:', tensor.shape, tensor.size);
|
||||||
|
|
||||||
|
// load models
|
||||||
|
await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.ageGenderNet.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
|
||||||
|
|
||||||
|
const optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options(ssdOptions); // create options object
|
||||||
|
const result = await faceapi // run detection
|
||||||
|
.detectAllFaces(tensor, optionsSSDMobileNet)
|
||||||
|
.withFaceLandmarks()
|
||||||
|
.withFaceExpressions()
|
||||||
|
.withFaceDescriptors()
|
||||||
|
.withAgeAndGender();
|
||||||
|
log.data('results:', result.length);
|
||||||
|
}
|
||||||
|
|
||||||
|
main();
|
|
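The tf.tidy block above is mostly shape plumbing; running the same steps on a tiny assumed 2x2 image makes the shapes explicit (illustrative sketch):

const tf = require('@tensorflow/tfjs-node');
// a 2x2 rgba image flattened to 16 ints, as image.getImageData() would provide
const rgba = tf.tensor([...Array(16).keys()], [2, 2, 4], 'int32'); // [height, width, 4]
const channels = tf.split(rgba, 4, 2); // four [2, 2, 1] tensors: r, g, b, a
const rgb = tf.stack([channels[0], channels[1], channels[2]], 2); // [2, 2, 3, 1], alpha dropped
const batched = tf.reshape(rgb, [1, 2, 2, 3]); // [1, height, width, 3], batch of one
console.log(batched.shape); // [ 1, 2, 2, 3 ]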
@@ -0,0 +1,84 @@
|
||||||
|
/**
|
||||||
|
* FaceAPI Demo for NodeJS
|
||||||
|
* - Analyzes face descriptors from source (image file or folder containing multiple image files)
|
||||||
|
* - Analyzes face descriptor from target
|
||||||
|
* - Finds best match
|
||||||
|
*/
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
const log = require('@vladmandic/pilogger');
|
||||||
|
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
|
||||||
|
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
|
||||||
|
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
|
||||||
|
|
||||||
|
let optionsSSDMobileNet;
|
||||||
|
const minConfidence = 0.1;
|
||||||
|
const distanceThreshold = 0.5;
|
||||||
|
const modelPath = 'model';
|
||||||
|
const labeledFaceDescriptors = [];
|
||||||
|
|
||||||
|
async function initFaceAPI() {
|
||||||
|
await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
|
||||||
|
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults: 1 });
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getDescriptors(imageFile) {
|
||||||
|
const buffer = fs.readFileSync(imageFile);
|
||||||
|
const tensor = tf.node.decodeImage(buffer, 3);
|
||||||
|
const faces = await faceapi.detectAllFaces(tensor, optionsSSDMobileNet)
|
||||||
|
.withFaceLandmarks()
|
||||||
|
.withFaceExpressions()
|
||||||
|
.withFaceDescriptors();
|
||||||
|
tf.dispose(tensor);
|
||||||
|
return faces.map((face) => face.descriptor);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function registerImage(inputFile) {
|
||||||
|
if (!inputFile.toLowerCase().endsWith('jpg') && !inputFile.toLowerCase().endsWith('png') && !inputFile.toLowerCase().endsWith('gif')) return;
|
||||||
|
log.data('Registered:', inputFile);
|
||||||
|
const descriptors = await getDescriptors(inputFile);
|
||||||
|
for (const descriptor of descriptors) {
|
||||||
|
const labeledFaceDescriptor = new faceapi.LabeledFaceDescriptors(inputFile, [descriptor]);
|
||||||
|
labeledFaceDescriptors.push(labeledFaceDescriptor);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function findBestMatch(inputFile) {
|
||||||
|
const matcher = new faceapi.FaceMatcher(labeledFaceDescriptors, distanceThreshold);
|
||||||
|
const descriptors = await getDescriptors(inputFile);
|
||||||
|
const matches = [];
|
||||||
|
for (const descriptor of descriptors) {
|
||||||
|
const match = await matcher.findBestMatch(descriptor);
|
||||||
|
matches.push(match);
|
||||||
|
}
|
||||||
|
return matches;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
log.header();
|
||||||
|
if (process.argv.length !== 4) {
|
||||||
|
log.error(process.argv[1], 'Expected <source image or folder> <target image>');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
await initFaceAPI();
|
||||||
|
log.info('Input:', process.argv[2]);
|
||||||
|
if (fs.statSync(process.argv[2]).isFile()) {
|
||||||
|
await registerImage(process.argv[2]); // register image
|
||||||
|
} else if (fs.statSync(process.argv[2]).isDirectory()) {
|
||||||
|
const dir = fs.readdirSync(process.argv[2]);
|
||||||
|
for (const f of dir) await registerImage(path.join(process.argv[2], f)); // register all images in a folder
|
||||||
|
}
|
||||||
|
log.info('Comparing:', process.argv[3], 'Descriptors:', labeledFaceDescriptors.length);
|
||||||
|
if (labeledFaceDescriptors.length > 0) {
|
||||||
|
const bestMatch = await findBestMatch(process.argv[3]); // find best match to all registered images
|
||||||
|
log.data('Match:', bestMatch);
|
||||||
|
} else {
|
||||||
|
log.warn('No registered faces');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
main();
|
|
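FaceMatcher does the heavy lifting above; conceptually it is a nearest-neighbour search over the registered descriptors. An illustrative re-implementation of that idea (not face-api's actual internals):

// entries: [{ label, descriptors: Float32Array[] }], query: Float32Array
function findBest(query, entries, distanceThreshold = 0.5) {
  let best = { label: 'unknown', distance: Number.POSITIVE_INFINITY };
  for (const entry of entries) {
    for (const descriptor of entry.descriptors) {
      const distance = euclidean(query, descriptor);
      if (distance < best.distance) best = { label: entry.label, distance };
    }
  }
  if (best.distance > distanceThreshold) best.label = 'unknown'; // too far from every registered face
  return best;
}

function euclidean(a, b) {
  let sum = 0;
  for (let i = 0; i < a.length; i++) sum += (a[i] - b[i]) ** 2;
  return Math.sqrt(sum);
}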
@@ -1,18 +1,20 @@
|
||||||
// @ts-nocheck
|
/**
|
||||||
|
* FaceAPI Demo for NodeJS
|
||||||
|
* - Used by `node-multiprocess.js`
|
||||||
|
*/
|
||||||
|
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
|
|
||||||
const log = require('@vladmandic/pilogger');
|
const log = require('@vladmandic/pilogger');
|
||||||
|
|
||||||
// workers actual import tfjs and faceapi modules
|
// workers actual import tfjs and faceapi modules
|
||||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
|
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
|
||||||
const tf = require('@tensorflow/tfjs-node');
|
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
|
||||||
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
|
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
|
||||||
|
|
||||||
// options used by faceapi
|
// options used by faceapi
|
||||||
const modelPathRoot = '../model';
|
const modelPathRoot = '../model';
|
||||||
const minScore = 0.1;
|
const minConfidence = 0.15;
|
||||||
const maxResults = 5;
|
const maxResults = 5;
|
||||||
let optionsSSDMobileNet;
|
let optionsSSDMobileNet;
|
||||||
|
|
||||||
|
@@ -30,10 +32,10 @@ async function detect(img) {
|
||||||
const tensor = await image(img);
|
const tensor = await image(img);
|
||||||
const result = await faceapi
|
const result = await faceapi
|
||||||
.detectAllFaces(tensor, optionsSSDMobileNet)
|
.detectAllFaces(tensor, optionsSSDMobileNet)
|
||||||
.withFaceLandmarks()
|
.withFaceLandmarks();
|
||||||
.withFaceExpressions()
|
// .withFaceExpressions()
|
||||||
.withFaceDescriptors()
|
// .withFaceDescriptors()
|
||||||
.withAgeAndGender();
|
// .withAgeAndGender();
|
||||||
process.send({ image: img, detected: result }); // send results back to main
|
process.send({ image: img, detected: result }); // send results back to main
|
||||||
process.send({ ready: true }); // send signal back to main that this worker is now idle and ready for next image
|
process.send({ ready: true }); // send signal back to main that this worker is now idle and ready for next image
|
||||||
tensor.dispose();
|
tensor.dispose();
|
||||||
|
@@ -53,7 +55,7 @@ async function main() {
|
||||||
await faceapi.tf.enableProdMode();
|
await faceapi.tf.enableProdMode();
|
||||||
await faceapi.tf.ENV.set('DEBUG', false);
|
await faceapi.tf.ENV.set('DEBUG', false);
|
||||||
await faceapi.tf.ready();
|
await faceapi.tf.ready();
|
||||||
log.state('Worker: PID:', process.pid, `TensorFlow/JS ${faceapi.tf.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf.getBackend()}`);
|
log.state('Worker: PID:', process.pid, `TensorFlow/JS ${faceapi.tf.version_core} FaceAPI ${faceapi.version} Backend: ${faceapi.tf.getBackend()}`);
|
||||||
|
|
||||||
// load and initialize face-api models
|
// load and initialize face-api models
|
||||||
const modelPath = path.join(__dirname, modelPathRoot);
|
const modelPath = path.join(__dirname, modelPathRoot);
|
||||||
|
@@ -62,7 +64,7 @@ async function main() {
|
||||||
await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
|
await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
|
||||||
await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
|
await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
|
||||||
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
|
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
|
||||||
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: minScore, maxResults });
|
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults });
|
||||||
|
|
||||||
// now we're ready, so send message back to main that it knows it can use this worker
|
// now we're ready, so send message back to main that it knows it can use this worker
|
||||||
process.send({ ready: true });
|
process.send({ ready: true });
|
|
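Timing throughout these worker demos uses process.hrtime.bigint(), which returns nanoseconds as a BigInt that cannot be mixed with plain numbers (hence the parseInt-to-Number change in the next hunks). The conversion in isolation:

const t0 = process.hrtime.bigint(); // nanoseconds, as BigInt
// ... work being measured ...
const t1 = process.hrtime.bigint();
const ms = Math.trunc(Number(t1 - t0) / 1e6); // subtract as BigInt, then cast and scale ns -> ms
console.log(`elapsed: ${ms} ms`);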
@@ -1,24 +1,27 @@
|
||||||
// @ts-nocheck
|
/**
|
||||||
|
* FaceAPI Demo for NodeJS
|
||||||
|
* - Starts multiple worker processes and uses them as worker pool to process all input images
|
||||||
|
* - Images are enumerated in main process and sent for processing to worker processes via ipc
|
||||||
|
*/
|
||||||
|
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
|
|
||||||
const log = require('@vladmandic/pilogger'); // this is my simple logger with few extra features
|
const log = require('@vladmandic/pilogger'); // this is my simple logger with few extra features
|
||||||
const child_process = require('child_process');
|
const child_process = require('child_process');
|
||||||
// note that main process does not import faceapi or tfjs at all
|
// note that main process does not need to import faceapi or tfjs at all as processing is done in a worker process
|
||||||
|
|
||||||
const imgPathRoot = './example'; // modify to include your sample images
|
const imgPathRoot = './demo'; // modify to include your sample images
|
||||||
const numWorkers = 2; // how many workers will be started
|
const numWorkers = 4; // how many workers will be started
|
||||||
const workers = []; // this holds worker processes
|
const workers = []; // this holds worker processes
|
||||||
const images = []; // this holds queue of enumerated images
|
const images = []; // this holds queue of enumerated images
|
||||||
const t = []; // timers
|
const t = []; // timers
|
||||||
let dir;
|
let numImages;
|
||||||
|
|
||||||
// triggered by main when worker sends ready message
|
// triggered by main when worker sends ready message
|
||||||
// if image pool is empty, signal worker to exit otherwise dispatch image to worker and remove image from queue
|
// if image pool is empty, signal worker to exit otherwise dispatch image to worker and remove image from queue
|
||||||
async function detect(worker) {
|
async function detect(worker) {
|
||||||
if (!t[2]) t[2] = process.hrtime.bigint(); // first time do a timestamp so we can measure initial latency
|
if (!t[2]) t[2] = process.hrtime.bigint(); // first time do a timestamp so we can measure initial latency
|
||||||
if (images.length === dir.length) worker.send({ test: true }); // for first image in queue just measure latency
|
if (images.length === numImages) worker.send({ test: true }); // for first image in queue just measure latency
|
||||||
if (images.length === 0) worker.send({ exit: true }); // nothing left in queue
|
if (images.length === 0) worker.send({ exit: true }); // nothing left in queue
|
||||||
else {
|
else {
|
||||||
log.state('Main: dispatching to worker:', worker.pid);
|
log.state('Main: dispatching to worker:', worker.pid);
|
||||||
|
@@ -33,14 +36,14 @@ function waitCompletion() {
|
||||||
if (activeWorkers > 0) setImmediate(() => waitCompletion());
|
if (activeWorkers > 0) setImmediate(() => waitCompletion());
|
||||||
else {
|
else {
|
||||||
t[1] = process.hrtime.bigint();
|
t[1] = process.hrtime.bigint();
|
||||||
log.info('Processed', dir.length, 'images in', Math.trunc(parseInt(t[1] - t[0]) / 1000 / 1000), 'ms');
|
log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(Number(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(Number(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(Number(t[1] - t[2]) / numImages / 1000000), 'ms');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function measureLatency() {
|
function measureLatency() {
|
||||||
t[3] = process.hrtime.bigint();
|
t[3] = process.hrtime.bigint();
|
||||||
const latencyInitialization = Math.trunc(parseInt(t[2] - t[0]) / 1000 / 1000);
|
const latencyInitialization = Math.trunc(Number(t[2] - t[0]) / 1000 / 1000);
|
||||||
const latencyRoundTrip = Math.trunc(parseInt(t[3] - t[2]) / 1000 / 1000);
|
const latencyRoundTrip = Math.trunc(Number(t[3] - t[2]) / 1000 / 1000);
|
||||||
log.info('Latency: worker initialization:', latencyInitialization, 'message round trip:', latencyRoundTrip);
|
log.info('Latency: worker initialization:', latencyInitialization, 'message round trip:', latencyRoundTrip);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -49,16 +52,17 @@ async function main() {
|
||||||
log.info('FaceAPI multi-process test');
|
log.info('FaceAPI multi-process test');
|
||||||
|
|
||||||
// enumerate all images into queue
|
// enumerate all images into queue
|
||||||
dir = fs.readdirSync(imgPathRoot);
|
const dir = fs.readdirSync(imgPathRoot);
|
||||||
for (const imgFile of dir) {
|
for (const imgFile of dir) {
|
||||||
if (imgFile.toLocaleLowerCase().endsWith('.jpg')) images.push(path.join(imgPathRoot, imgFile));
|
if (imgFile.toLocaleLowerCase().endsWith('.jpg')) images.push(path.join(imgPathRoot, imgFile));
|
||||||
}
|
}
|
||||||
|
numImages = images.length;
|
||||||
|
|
||||||
t[0] = process.hrtime.bigint();
|
t[0] = process.hrtime.bigint();
|
||||||
// manage worker processes
|
// manage worker processes
|
||||||
for (let i = 0; i < numWorkers; i++) {
|
for (let i = 0; i < numWorkers; i++) {
|
||||||
// create worker process
|
// create worker process
|
||||||
workers[i] = await child_process.fork('example/node-multiprocess-worker.js', ['special']);
|
workers[i] = await child_process.fork('demo/node-multiprocess-worker.js', ['special']);
|
||||||
// parse message that worker process sends back to main
|
// parse message that worker process sends back to main
|
||||||
// if message is ready, dispatch next image in queue
|
// if message is ready, dispatch next image in queue
|
||||||
// if message is processing result, just print how many faces were detected
|
// if message is processing result, just print how many faces were detected
|
|
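The worker-pool protocol is spread across the two files above; condensed into one sketch, the ipc message flow looks like this (message names match the demo, but this is an illustration, not the full implementation):

const child_process = require('child_process');

// main process: fork a worker and feed it images one at a time over ipc
const queue = ['demo/sample1.jpg', 'demo/sample2.jpg']; // enumerated images
const worker = child_process.fork('demo/node-multiprocess-worker.js');
worker.on('message', (msg) => {
  if (msg.ready) { // worker finished loading models or finished an image
    if (queue.length > 0) worker.send({ image: queue.shift() }); // dispatch next image
    else worker.send({ exit: true }); // queue drained, tell worker to quit
  }
  if (msg.detected) console.log('faces in', msg.image, ':', msg.detected.length);
});

// worker process (inside the forked script) mirrors this:
// process.on('message', async (msg) => {
//   if (msg.image) await detect(msg.image); // detect() sends { image, detected } then { ready: true }
//   if (msg.exit) process.exit(0);
// });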
@@ -0,0 +1,31 @@
|
||||||
|
/**
|
||||||
|
* FaceAPI Demo for NodeJS
|
||||||
|
* - Loads image
|
||||||
|
* - Outputs results to console
|
||||||
|
*/
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
|
||||||
|
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
|
||||||
|
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
await faceapi.nets.ssdMobilenetv1.loadFromDisk('model'); // load models from a specific path
|
||||||
|
await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
|
||||||
|
await faceapi.nets.ageGenderNet.loadFromDisk('model');
|
||||||
|
await faceapi.nets.faceRecognitionNet.loadFromDisk('model');
|
||||||
|
await faceapi.nets.faceExpressionNet.loadFromDisk('model');
|
||||||
|
const options = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.1, maxResults: 10 }); // set model options
|
||||||
|
const buffer = fs.readFileSync('demo/sample1.jpg'); // load jpg image as binary
|
||||||
|
const decodeT = faceapi.tf.node.decodeImage(buffer, 3); // decode binary buffer to rgb tensor
|
||||||
|
const expandT = faceapi.tf.expandDims(decodeT, 0); // add batch dimension to tensor
|
||||||
|
const result = await faceapi.detectAllFaces(expandT, options) // run detection
|
||||||
|
.withFaceLandmarks()
|
||||||
|
.withFaceExpressions()
|
||||||
|
.withFaceDescriptors()
|
||||||
|
.withAgeAndGender();
|
||||||
|
faceapi.tf.dispose([decodeT, expandT]); // dispose tensors to avoid memory leaks
|
||||||
|
console.log({ result }); // eslint-disable-line no-console
|
||||||
|
}
|
||||||
|
|
||||||
|
main();
|
|
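detectAllFaces receives a 4D tensor here because expandDims adds a leading batch dimension; the step in isolation, with an assumed sample image (illustrative):

const fs = require('fs');
const tf = require('@tensorflow/tfjs-node');
const buffer = fs.readFileSync('demo/sample1.jpg'); // any jpg will do
const decodeT = tf.node.decodeImage(buffer, 3); // [height, width, 3]
const expandT = tf.expandDims(decodeT, 0); // [1, height, width, 3]
console.log(decodeT.shape, '->', expandT.shape);
tf.dispose([decodeT, expandT]); // tensors are not garbage collected, free them explicitly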
@@ -0,0 +1,53 @@
|
||||||
|
/**
|
||||||
|
* FaceAPI Demo for NodeJS using WASM
|
||||||
|
* - Loads WASM binaries from external CDN
|
||||||
|
* - Loads image
|
||||||
|
* - Outputs results to console
|
||||||
|
*/
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const image = require('@canvas/image'); // eslint-disable-line node/no-missing-require
|
||||||
|
const tf = require('@tensorflow/tfjs');
|
||||||
|
const wasm = require('@tensorflow/tfjs-backend-wasm');
|
||||||
|
const faceapi = require('../dist/face-api.node-wasm.js'); // use this when using face-api in dev mode
|
||||||
|
|
||||||
|
async function readImage(imageFile) {
|
||||||
|
const buffer = fs.readFileSync(imageFile); // read image from disk
|
||||||
|
const canvas = await image.imageFromBuffer(buffer); // decode to canvas
|
||||||
|
const imageData = image.getImageData(canvas); // read decoded image data from canvas
|
||||||
|
const tensor = tf.tidy(() => { // create tensor from image data
|
||||||
|
const data = tf.tensor(Array.from(imageData?.data || []), [canvas.height, canvas.width, 4], 'int32'); // create rgba image tensor from flat array and flip to height x width
|
||||||
|
const channels = tf.split(data, 4, 2); // split rgba to channels
|
||||||
|
const rgb = tf.stack([channels[0], channels[1], channels[2]], 2); // stack channels back to rgb
|
||||||
|
const squeeze = tf.squeeze(rgb); // drop the extra trailing dimension left by stack, yielding [height, width, 3]
|
||||||
|
return squeeze;
|
||||||
|
});
|
||||||
|
console.log(`Image: ${imageFile} [${canvas.width} x ${canvas.height}] Tensor: ${tensor.shape}, Size: ${tensor.size}`); // eslint-disable-line no-console
|
||||||
|
return tensor;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
wasm.setWasmPaths('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/', true);
|
||||||
|
await tf.setBackend('wasm');
|
||||||
|
await tf.ready();
|
||||||
|
console.log(`Version: FaceAPI ${faceapi.version} TensorFlow/JS ${tf.version_core} Backend: ${faceapi.tf.getBackend()}`); // eslint-disable-line no-console
|
||||||
|
await faceapi.nets.ssdMobilenetv1.loadFromDisk('model'); // load models from a specific path
|
||||||
|
await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
|
||||||
|
await faceapi.nets.ageGenderNet.loadFromDisk('model');
|
||||||
|
await faceapi.nets.faceRecognitionNet.loadFromDisk('model');
|
||||||
|
await faceapi.nets.faceExpressionNet.loadFromDisk('model');
|
||||||
|
const options = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.1, maxResults: 10 }); // set model options
|
||||||
|
const tensor = await readImage('demo/sample1.jpg');
|
||||||
|
const t0 = performance.now();
|
||||||
|
const result = await faceapi.detectAllFaces(tensor, options) // run detection
|
||||||
|
.withFaceLandmarks()
|
||||||
|
.withFaceExpressions()
|
||||||
|
.withFaceDescriptors()
|
||||||
|
.withAgeAndGender();
|
||||||
|
tf.dispose(tensor); // dispose tensors to avoid memory leaks
|
||||||
|
const t1 = performance.now();
|
||||||
|
console.log('Time', t1 - t0); // eslint-disable-line no-console
|
||||||
|
console.log('Result', result); // eslint-disable-line no-console
|
||||||
|
}
|
||||||
|
|
||||||
|
main();
|
|
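The WASM variant differs from the tfjs-node demos mainly in backend bootstrap. Reduced to its essentials (the second setWasmPaths argument is passed as true to use platform fetch, which is what lets NodeJS pull the binaries from the CDN; treat that flag's exact semantics as an assumption to verify against your tfjs version):

const tf = require('@tensorflow/tfjs');
const wasm = require('@tensorflow/tfjs-backend-wasm');

async function initWasm() {
  wasm.setWasmPaths('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/', true);
  await tf.setBackend('wasm');
  await tf.ready();
  return tf.getBackend(); // 'wasm'
}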
@@ -0,0 +1,139 @@
|
||||||
|
/**
|
||||||
|
* FaceAPI Demo for NodeJS
|
||||||
|
* - Uses external library [node-fetch](https://www.npmjs.com/package/node-fetch) to load images via http
|
||||||
|
* - Loads image from provided param
|
||||||
|
* - Outputs results to console
|
||||||
|
*/
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const process = require('process');
|
||||||
|
const path = require('path');
|
||||||
|
const log = require('@vladmandic/pilogger');
|
||||||
|
|
||||||
|
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
|
||||||
|
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
|
||||||
|
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
|
||||||
|
|
||||||
|
const modelPathRoot = '../model';
|
||||||
|
const imgPathRoot = './demo'; // modify to include your sample images
|
||||||
|
const minConfidence = 0.15;
|
||||||
|
const maxResults = 5;
|
||||||
|
let optionsSSDMobileNet;
|
||||||
|
let fetch; // dynamically imported later
|
||||||
|
|
||||||
|
async function image(input) {
|
||||||
|
// read input image file and create tensor to be used for processing
|
||||||
|
let buffer;
|
||||||
|
log.info('Loading image:', input);
|
||||||
|
if (input.startsWith('http:') || input.startsWith('https:')) {
|
||||||
|
const res = await fetch(input);
|
||||||
|
if (res && res.ok) buffer = await res.buffer();
|
||||||
|
else log.error('Invalid image URL:', input, res.status, res.statusText, res.headers.get('content-type'));
|
||||||
|
} else {
|
||||||
|
buffer = fs.readFileSync(input);
|
||||||
|
}
|
||||||
|
|
||||||
|
// decode image using tfjs-node so we don't need external dependencies
|
||||||
|
// can also be done using canvas.js or some other 3rd party image library
|
||||||
|
if (!buffer) return {};
|
||||||
|
const tensor = tf.tidy(() => {
|
||||||
|
const decode = faceapi.tf.node.decodeImage(buffer, 3);
|
||||||
|
let expand;
|
||||||
|
if (decode.shape[2] === 4) { // input is in rgba format, need to convert to rgb
|
||||||
|
const channels = faceapi.tf.split(decode, 4, 2); // tf.split(tensor, 4, 2); // split rgba to channels
|
||||||
|
const rgb = faceapi.tf.stack([channels[0], channels[1], channels[2]], 2); // stack channels back to rgb and ignore alpha
|
||||||
|
expand = faceapi.tf.reshape(rgb, [1, decode.shape[0], decode.shape[1], 3]); // move extra dim from the end of tensor and use it as batch number instead
|
||||||
|
} else {
|
||||||
|
expand = faceapi.tf.expandDims(decode, 0);
|
||||||
|
}
|
||||||
|
const cast = faceapi.tf.cast(expand, 'float32');
|
||||||
|
return cast;
|
||||||
|
});
|
||||||
|
return tensor;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function detect(tensor) {
|
||||||
|
try {
|
||||||
|
const result = await faceapi
|
||||||
|
.detectAllFaces(tensor, optionsSSDMobileNet)
|
||||||
|
.withFaceLandmarks()
|
||||||
|
.withFaceExpressions()
|
||||||
|
.withFaceDescriptors()
|
||||||
|
.withAgeAndGender();
|
||||||
|
return result;
|
||||||
|
} catch (err) {
|
||||||
|
log.error('Caught error', err.message);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
|
||||||
|
function detectPromise(tensor) {
|
||||||
|
return new Promise((resolve) => faceapi
|
||||||
|
.detectAllFaces(tensor, optionsSSDMobileNet)
|
||||||
|
.withFaceLandmarks()
|
||||||
|
.withFaceExpressions()
|
||||||
|
.withFaceDescriptors()
|
||||||
|
.withAgeAndGender()
|
||||||
|
.then((res) => resolve(res))
|
||||||
|
.catch((err) => {
|
||||||
|
log.error('Caught error', err.message);
|
||||||
|
resolve([]);
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
function print(face) {
|
||||||
|
const expression = Object.entries(face.expressions).reduce((acc, val) => ((val[1] > acc[1]) ? val : acc), ['', 0]);
|
||||||
|
const box = [face.alignedRect._box._x, face.alignedRect._box._y, face.alignedRect._box._width, face.alignedRect._box._height];
|
||||||
|
const gender = `Gender: ${Math.round(100 * face.genderProbability)}% ${face.gender}`;
|
||||||
|
log.data(`Detection confidence: ${Math.round(100 * face.detection._score)}% ${gender} Age: ${Math.round(10 * face.age) / 10} Expression: ${Math.round(100 * expression[1])}% ${expression[0]} Box: ${box.map((a) => Math.round(a))}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
log.header();
|
||||||
|
log.info('FaceAPI single-process test');
|
||||||
|
|
||||||
|
// eslint-disable-next-line node/no-extraneous-import
|
||||||
|
fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-missing-import
|
||||||
|
|
||||||
|
await faceapi.tf.setBackend('tensorflow');
|
||||||
|
await faceapi.tf.ready();
|
||||||
|
|
||||||
|
log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version} Backend: ${faceapi.tf?.getBackend()}`);
|
||||||
|
|
||||||
|
log.info('Loading FaceAPI models');
|
||||||
|
const modelPath = path.join(__dirname, modelPathRoot);
|
||||||
|
await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.ageGenderNet.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
|
||||||
|
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
|
||||||
|
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults });
|
||||||
|
|
||||||
|
if (process.argv.length !== 4) {
|
||||||
|
const t0 = process.hrtime.bigint();
|
||||||
|
const dir = fs.readdirSync(imgPathRoot);
|
||||||
|
for (const img of dir) {
|
||||||
|
if (!img.toLocaleLowerCase().endsWith('.jpg')) continue;
|
||||||
|
const tensor = await image(path.join(imgPathRoot, img));
|
||||||
|
const result = await detect(tensor);
|
||||||
|
log.data('Image:', img, 'Detected faces:', result.length);
|
||||||
|
for (const face of result) print(face);
|
||||||
|
tensor.dispose();
|
||||||
|
}
|
||||||
|
const t1 = process.hrtime.bigint();
|
||||||
|
log.info('Processed', dir.length, 'images in', Math.trunc(Number((t1 - t0)) / 1000 / 1000), 'ms');
|
||||||
|
} else {
|
||||||
|
const param = process.argv[2];
|
||||||
|
if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {
|
||||||
|
const tensor = await image(param);
|
||||||
|
const result = await detect(tensor);
|
||||||
|
// const result = await detectPromise(null);
|
||||||
|
log.data('Image:', param, 'Detected faces:', result.length);
|
||||||
|
for (const face of result) print(face);
|
||||||
|
tensor.dispose();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
main();
|
[binary image diffs: seven existing sample images unchanged (141, 178, 216, 206, 162, 295, 569 KiB); one image added (240 KiB)]
|
@@ -0,0 +1,21 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<title>FaceAPI Live WebCam Demo</title>
|
||||||
|
<meta http-equiv="content-type" content="text/html; charset=utf-8">
|
||||||
|
<meta name="viewport" content="width=device-width, shrink-to-fit=yes">
|
||||||
|
<meta name="application-name" content="FaceAPI">
|
||||||
|
<meta name="keywords" content="FaceAPI">
|
||||||
|
<meta name="description" content="FaceAPI: AI-powered Face Detection, Description & Recognition for Browser and NodeJS using Tensorflow/JS; Author: Vladimir Mandic <https://github.com/vladmandic>">
|
||||||
|
<meta name="msapplication-tooltip" content="FaceAPI: AI-powered Face Detection, Description & Recognition for Browser and NodeJS using Tensorflow/JS; Author: Vladimir Mandic <https://github.com/vladmandic>">
|
||||||
|
<link rel="shortcut icon" href="../favicon.ico" type="image/x-icon">
|
||||||
|
<script src="./webcam.js" type="module"></script>
|
||||||
|
</head>
|
||||||
|
<body style="font-family: monospace; background: black; color: white; font-size: 16px; line-height: 22px; margin: 0; overflow: hidden">
|
||||||
|
<video id="video" playsinline class="video"></video>
|
||||||
|
<canvas id="canvas" class="canvas" style="position: fixed; top: 0; left: 0; z-index: 10"></canvas>
|
||||||
|
<div id="log" style="overflow-y: scroll; height: 16.5rem"></div>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
|
||||||
|
|
|
@@ -1,7 +1,14 @@
|
||||||
import * as faceapi from '../dist/face-api.esm.js';
|
/**
|
||||||
|
* FaceAPI Demo for Browsers
|
||||||
|
* Loaded via `webcam.html`
|
||||||
|
*/
|
||||||
|
|
||||||
|
import * as faceapi from '../dist/face-api.esm.js'; // use when in dev mode
|
||||||
|
// import * as faceapi from '@vladmandic/face-api'; // use when downloading face-api as npm
|
||||||
|
|
||||||
// configuration options
|
// configuration options
|
||||||
const modelPath = 'https://vladmandic.github.io/face-api/model/'; // path to model folder that will be loaded using http
|
const modelPath = '../model/'; // path to model folder that will be loaded using http
|
||||||
|
// const modelPath = 'https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model/'; // path to model folder that will be loaded using http
|
||||||
const minScore = 0.2; // minimum score
|
const minScore = 0.2; // minimum score
|
||||||
const maxResults = 5; // maximum number of results to return
|
const maxResults = 5; // maximum number of results to return
|
||||||
let optionsSSDMobileNet;
|
let optionsSSDMobileNet;
|
||||||
|
@@ -16,19 +23,18 @@ function str(json) {
|
||||||
|
|
||||||
// helper function to print strings to html document as a log
|
// helper function to print strings to html document as a log
|
||||||
function log(...txt) {
|
function log(...txt) {
|
||||||
// eslint-disable-next-line no-console
|
console.log(...txt); // eslint-disable-line no-console
|
||||||
console.log(...txt);
|
const div = document.getElementById('log');
|
||||||
// @ts-ignore
|
if (div) div.innerHTML += `<br>${txt}`;
|
||||||
document.getElementById('log').innerHTML += `<br>${txt}`;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// helper function to draw detected faces
|
// helper function to draw detected faces
|
||||||
function drawFaces(canvas, data, fps) {
|
function drawFaces(canvas, data, fps) {
|
||||||
const ctx = canvas.getContext('2d');
|
const ctx = canvas.getContext('2d', { willReadFrequently: true });
|
||||||
if (!ctx) return;
|
if (!ctx) return;
|
||||||
ctx.clearRect(0, 0, canvas.width, canvas.height);
|
ctx.clearRect(0, 0, canvas.width, canvas.height);
|
||||||
// draw title
|
// draw title
|
||||||
ctx.font = '1.2rem sans-serif';
|
ctx.font = 'small-caps 20px "Segoe UI"';
|
||||||
ctx.fillStyle = 'white';
|
ctx.fillStyle = 'white';
|
||||||
ctx.fillText(`FPS: ${fps}`, 10, 25);
|
ctx.fillText(`FPS: ${fps}`, 10, 25);
|
||||||
for (const person of data) {
|
for (const person of data) {
|
||||||
|
@@ -36,25 +42,30 @@ function drawFaces(canvas, data, fps) {
|
||||||
ctx.lineWidth = 3;
|
ctx.lineWidth = 3;
|
||||||
ctx.strokeStyle = 'deepskyblue';
|
ctx.strokeStyle = 'deepskyblue';
|
||||||
ctx.fillStyle = 'deepskyblue';
|
ctx.fillStyle = 'deepskyblue';
|
||||||
ctx.globalAlpha = 0.4;
|
ctx.globalAlpha = 0.6;
|
||||||
ctx.beginPath();
|
ctx.beginPath();
|
||||||
ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height);
|
ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height);
|
||||||
ctx.stroke();
|
ctx.stroke();
|
||||||
ctx.globalAlpha = 1;
|
ctx.globalAlpha = 1;
|
||||||
// const expression = person.expressions.sort((a, b) => Object.values(a)[0] - Object.values(b)[0]);
|
// draw text labels
|
||||||
const expression = Object.entries(person.expressions).sort((a, b) => b[1] - a[1]);
|
const expression = Object.entries(person.expressions).sort((a, b) => b[1] - a[1]);
|
||||||
ctx.fillText(`gender ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 60);
|
ctx.fillStyle = 'black';
|
||||||
ctx.fillText(`expression ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 42);
|
ctx.fillText(`gender: ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 59);
|
||||||
ctx.fillText(`age ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 24);
|
ctx.fillText(`expression: ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 41);
|
||||||
ctx.fillText(`roll:${person.angle.roll.toFixed(3)} pitch:${person.angle.pitch.toFixed(3)} yaw:${person.angle.yaw.toFixed(3)}`, person.detection.box.x, person.detection.box.y - 6);
|
ctx.fillText(`age: ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 23);
|
||||||
// draw face points for each face
|
ctx.fillText(`roll:${person.angle.roll}° pitch:${person.angle.pitch}° yaw:${person.angle.yaw}°`, person.detection.box.x, person.detection.box.y - 5);
|
||||||
|
ctx.fillStyle = 'lightblue';
|
||||||
|
ctx.fillText(`gender: ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 60);
|
||||||
|
ctx.fillText(`expression: ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 42);
|
||||||
|
ctx.fillText(`age: ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 24);
|
||||||
|
ctx.fillText(`roll:${person.angle.roll}° pitch:${person.angle.pitch}° yaw:${person.angle.yaw}°`, person.detection.box.x, person.detection.box.y - 6);
|
||||||
|
// draw face points for each face
|
||||||
|
ctx.globalAlpha = 0.8;
|
||||||
ctx.fillStyle = 'lightblue';
|
ctx.fillStyle = 'lightblue';
|
||||||
ctx.globalAlpha = 0.5;
|
|
||||||
const pointSize = 2;
|
const pointSize = 2;
|
||||||
for (let i = 0; i < person.landmarks.positions.length; i++) {
|
for (let i = 0; i < person.landmarks.positions.length; i++) {
|
||||||
ctx.beginPath();
|
ctx.beginPath();
|
||||||
ctx.arc(person.landmarks.positions[i].x, person.landmarks.positions[i].y, pointSize, 0, 2 * Math.PI);
|
ctx.arc(person.landmarks.positions[i].x, person.landmarks.positions[i].y, pointSize, 0, 2 * Math.PI);
|
||||||
// ctx.fillText(`${i}`, person.landmarks.positions[i].x + 4, person.landmarks.positions[i].y + 4);
|
|
||||||
ctx.fill();
|
ctx.fill();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -88,7 +99,6 @@ async function setupCamera() {
|
||||||
const canvas = document.getElementById('canvas');
|
const canvas = document.getElementById('canvas');
|
||||||
if (!video || !canvas) return null;
|
if (!video || !canvas) return null;
|
||||||
|
|
||||||
let msg = '';
|
|
||||||
log('Setting up camera');
|
log('Setting up camera');
|
||||||
// setup webcam. note that navigator.mediaDevices requires that page is accessed via https
|
// setup webcam. note that navigator.mediaDevices requires that page is accessed via https
|
||||||
if (!navigator.mediaDevices) {
|
if (!navigator.mediaDevices) {
|
||||||
|
@@ -96,53 +106,44 @@ async function setupCamera() {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
let stream;
|
let stream;
|
||||||
const constraints = {
|
const constraints = { audio: false, video: { facingMode: 'user', resizeMode: 'crop-and-scale' } };
|
||||||
audio: false,
|
|
||||||
video: { facingMode: 'user', resizeMode: 'crop-and-scale' },
|
|
||||||
};
|
|
||||||
if (window.innerWidth > window.innerHeight) constraints.video.width = { ideal: window.innerWidth };
|
if (window.innerWidth > window.innerHeight) constraints.video.width = { ideal: window.innerWidth };
|
||||||
else constraints.video.height = { ideal: window.innerHeight };
|
else constraints.video.height = { ideal: window.innerHeight };
|
||||||
try {
|
try {
|
||||||
stream = await navigator.mediaDevices.getUserMedia(constraints);
|
stream = await navigator.mediaDevices.getUserMedia(constraints);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
if (err.name === 'PermissionDeniedError' || err.name === 'NotAllowedError') msg = 'camera permission denied';
|
if (err.name === 'PermissionDeniedError' || err.name === 'NotAllowedError') log(`Camera Error: camera permission denied: ${err.message || err}`);
|
||||||
else if (err.name === 'SourceUnavailableError') msg = 'camera not available';
|
if (err.name === 'SourceUnavailableError') log(`Camera Error: camera not available: ${err.message || err}`);
|
||||||
log(`Camera Error: ${msg}: ${err.message || err}`);
|
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
// @ts-ignore
|
if (stream) {
|
||||||
if (stream) video.srcObject = stream;
|
video.srcObject = stream;
|
||||||
else {
|
} else {
|
||||||
log('Camera Error: stream empty');
|
log('Camera Error: stream empty');
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
const track = stream.getVideoTracks()[0];
|
const track = stream.getVideoTracks()[0];
|
||||||
const settings = track.getSettings();
|
const settings = track.getSettings();
|
||||||
log(`Camera active: ${track.label} ${str(constraints)}`);
|
if (settings.deviceId) delete settings.deviceId;
|
||||||
|
if (settings.groupId) delete settings.groupId;
|
||||||
|
if (settings.aspectRatio) settings.aspectRatio = Math.trunc(100 * settings.aspectRatio) / 100;
|
||||||
|
log(`Camera active: ${track.label}`);
|
||||||
log(`Camera settings: ${str(settings)}`);
|
log(`Camera settings: ${str(settings)}`);
|
||||||
canvas.addEventListener('click', () => {
|
canvas.addEventListener('click', () => {
|
||||||
// @ts-ignore
|
|
||||||
if (video && video.readyState >= 2) {
|
if (video && video.readyState >= 2) {
|
||||||
// @ts-ignore
|
|
||||||
if (video.paused) {
|
if (video.paused) {
|
||||||
// @ts-ignore
|
|
||||||
video.play();
|
video.play();
|
||||||
detectVideo(video, canvas);
|
detectVideo(video, canvas);
|
||||||
} else {
|
} else {
|
||||||
// @ts-ignore
|
|
||||||
video.pause();
|
video.pause();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// @ts-ignore
|
|
||||||
log(`Camera state: ${video.paused ? 'paused' : 'playing'}`);
|
log(`Camera state: ${video.paused ? 'paused' : 'playing'}`);
|
||||||
});
|
});
|
||||||
return new Promise((resolve) => {
|
return new Promise((resolve) => {
|
||||||
video.onloadeddata = async () => {
|
video.onloadeddata = async () => {
|
||||||
// @ts-ignore
|
|
||||||
canvas.width = video.videoWidth;
|
canvas.width = video.videoWidth;
|
||||||
// @ts-ignore
|
|
||||||
canvas.height = video.videoHeight;
|
canvas.height = video.videoHeight;
|
||||||
// @ts-ignore
|
|
||||||
video.play();
|
video.play();
|
||||||
detectVideo(video, canvas);
|
detectVideo(video, canvas);
|
||||||
resolve(true);
|
resolve(true);
|
||||||
|
@@ -152,18 +153,16 @@ async function setupCamera() {
|
||||||
|
|
||||||
async function setupFaceAPI() {
|
async function setupFaceAPI() {
|
||||||
// load face-api models
|
// load face-api models
|
||||||
log('Models loading');
|
// log('Models loading');
|
||||||
await faceapi.nets.tinyFaceDetector.load(modelPath);
|
// await faceapi.nets.tinyFaceDetector.load(modelPath); // using ssdMobilenetv1
|
||||||
await faceapi.nets.ssdMobilenetv1.load(modelPath);
|
await faceapi.nets.ssdMobilenetv1.load(modelPath);
|
||||||
await faceapi.nets.ageGenderNet.load(modelPath);
|
await faceapi.nets.ageGenderNet.load(modelPath);
|
||||||
await faceapi.nets.faceLandmark68Net.load(modelPath);
|
await faceapi.nets.faceLandmark68Net.load(modelPath);
|
||||||
await faceapi.nets.faceRecognitionNet.load(modelPath);
|
await faceapi.nets.faceRecognitionNet.load(modelPath);
|
||||||
await faceapi.nets.faceExpressionNet.load(modelPath);
|
await faceapi.nets.faceExpressionNet.load(modelPath);
|
||||||
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: minScore, maxResults });
|
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: minScore, maxResults });
|
||||||
|
|
||||||
// check tf engine state
|
// check tf engine state
|
||||||
const engine = await faceapi.tf.engine();
|
log(`Models loaded: ${str(faceapi.tf.engine().state.numTensors)} tensors`);
|
||||||
log(`Models loaded: ${str(engine.state)}`);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async function main() {
|
async function main() {
|
||||||
|
@@ -171,19 +170,21 @@
  log('FaceAPI WebCam Test');

  // if you want to use wasm backend location for wasm binaries must be specified
-  // await faceapi.tf.setWasmPaths('../node_modules/@tensorflow/tfjs-backend-wasm/dist/');
+  // await faceapi.tf?.setWasmPaths(`https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${faceapi.tf.version_core}/dist/`);
-  // await faceapi.tf.setBackend('wasm');
+  // await faceapi.tf?.setBackend('wasm');
+  // log(`WASM SIMD: ${await faceapi.tf?.env().getAsync('WASM_HAS_SIMD_SUPPORT')} Threads: ${await faceapi.tf?.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT') ? 'Multi' : 'Single'}`);

  // default is webgl backend
  await faceapi.tf.setBackend('webgl');

-  await faceapi.tf.enableProdMode();
-  await faceapi.tf.ENV.set('DEBUG', false);
  await faceapi.tf.ready();

+  // tfjs optimizations
+  if (faceapi.tf?.env().flagRegistry.CANVAS2D_WILL_READ_FREQUENTLY) faceapi.tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
+  if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
+  if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);

  // check version
-  log(`Version: FaceAPI ${str(faceapi?.version.faceapi || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`);
-  log(`Flags: ${JSON.stringify(faceapi?.tf?.ENV.flags || { tf: 'not loaded' })}`);
+  log(`Version: FaceAPI ${str(faceapi?.version || '(not loaded)')} TensorFlow/JS ${str(faceapi.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi.tf?.getBackend() || '(not loaded)')}`);

  await setupFaceAPI();
  await setupCamera();
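
The guarded flag assignments above only touch tfjs flags that the loaded backends actually registered, so the same demo code runs under webgl, wasm, or cpu without errors; a minimal sketch of the pattern (assuming `tf` is the loaded @tensorflow/tfjs module):

// Sketch: set a tfjs environment flag only when the flag is registered.
function setFlagIfRegistered(tf, flag, value) {
  if (tf.env().flagRegistry[flag]) tf.env().set(flag, value);
}
// usage: setFlagIfRegistered(tf, 'WEBGL_EXP_CONV', true);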
@@ -0,0 +1 @@
+ export * from '../types/face-api';
@@ -0,0 +1 @@
+ export * from '../types/face-api';
@@ -0,0 +1 @@
+ export * from '../types/face-api';
@@ -0,0 +1 @@
+ export * from '../types/face-api';
@@ -0,0 +1 @@
+ export * from '../types/face-api';
@@ -0,0 +1 @@
+ export * from '../types/face-api';
@@ -0,0 +1,28 @@
+ /*
+ import '@tensorflow/tfjs-core';
+ import '@tensorflow/tfjs-core/dist/types';
+ import '@tensorflow/tfjs-core/dist/register_all_gradients';
+ import '@tensorflow/tfjs-core/dist/public/chained_ops/register_all_chained_ops';
+ import '@tensorflow/tfjs-data';
+ import '@tensorflow/tfjs-layers';
+ import '@tensorflow/tfjs-converter';
+ import '@tensorflow/tfjs-backend-cpu';
+ import '@tensorflow/tfjs-backend-webgl';
+ import '@tensorflow/tfjs-backend-wasm';
+ import '@tensorflow/tfjs-backend-webgpu';
+ */
+
+ export declare const version: {
+   'tfjs-core': string;
+   'tfjs-backend-cpu': string;
+   'tfjs-backend-webgl': string;
+   'tfjs-data': string;
+   'tfjs-layers': string;
+   'tfjs-converter': string;
+   tfjs: string;
+ };
+
+ export { io, browser, image } from '@tensorflow/tfjs-core';
+ export { tensor, tidy, softmax, unstack, relu, add, conv2d, cast, zeros, concat, avgPool, stack, fill, transpose, tensor1d, tensor2d, tensor3d, tensor4d, maxPool, matMul, mul, sub, scalar } from '@tensorflow/tfjs-core';
+ export { div, pad, slice, reshape, slice3d, expandDims, depthwiseConv2d, separableConv2d, sigmoid, exp, tile, batchNorm, clipByValue } from '@tensorflow/tfjs-core';
+ export { ENV, Variable, Tensor, TensorLike, Rank, Tensor1D, Tensor2D, Tensor3D, Tensor4D, Tensor5D, NamedTensorMap } from '@tensorflow/tfjs-core';
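
The declaration above narrows the public tfjs surface to the ops and types face-api actually uses, which keeps the bundled `tfjs.esm.js` small and tree-shakeable; a consumer-side sketch (hypothetical import path):

// Sketch: import only what the custom tfjs bundle re-exports.
import { tensor, tidy, version } from './dist/tfjs.esm.js';
const t = tidy(() => tensor([1, 2, 3]));
console.log(version['tfjs-core'], t.shape);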
@@ -0,0 +1,9 @@
+ export declare const version: {
+   'tfjs-core': string;
+   'tfjs-backend-cpu': string;
+   'tfjs-backend-webgl': string;
+   'tfjs-data': string;
+   'tfjs-layers': string;
+   'tfjs-converter': string;
+   tfjs: string;
+ };
@@ -0,0 +1,7 @@
+ /*
+   Face-API
+   homepage: <https://github.com/vladmandic/face-api>
+   author: <https://github.com/vladmandic>'
+ */
+
+ var e="4.22.0";var s="4.22.0";var t="4.22.0";var n="4.22.0";var i="4.22.0";var w={tfjs:e,"tfjs-core":e,"tfjs-converter":s,"tfjs-backend-cpu":t,"tfjs-backend-webgl":n,"tfjs-backend-wasm":i};export{w as version};
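
The minified line simply builds and exports a version map for the bundled tfjs packages; reading it is one import (hypothetical file path):

// Sketch: read the bundled tfjs version map.
import { version } from './dist/tfjs.version.js';
console.log(version.tfjs, version['tfjs-backend-webgl']); // e.g. '4.22.0'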
@@ -1,13 +0,0 @@
- <!DOCTYPE html>
- <html lang="en">
-   <head>
-     <meta charset="utf-8">
-     <meta content="text/html">
-     <title>FaceAPI Static Images Demo</title>
-     <meta name="viewport" content="width=device-width, initial-scale=1.0, shrink-to-fit=yes">
-     <script src="./index.js" type="module"></script>
-   </head>
-   <body style="font-family: monospace; background: black; color: white; font-size: 16px; line-height: 22px; margin: 0;">
-     <div id="log"></div>
-   </body>
- </html>
@@ -1,84 +0,0 @@
- // @ts-nocheck
-
- const fs = require('fs');
- const process = require('process');
- const path = require('path');
- // eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
- const log = require('@vladmandic/pilogger');
- // eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
- const tf = require('@tensorflow/tfjs-node');
- const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
-
- const modelPathRoot = '../model';
- const imgPathRoot = './example'; // modify to include your sample images
- const minScore = 0.1;
- const maxResults = 5;
- let optionsSSDMobileNet;
-
- async function image(img) {
-   const buffer = fs.readFileSync(img);
-   const decoded = tf.node.decodeImage(buffer);
-   const casted = decoded.toFloat();
-   const result = casted.expandDims(0);
-   decoded.dispose();
-   casted.dispose();
-   return result;
- }
-
- async function detect(tensor) {
-   const result = await faceapi
-     .detectAllFaces(tensor, optionsSSDMobileNet)
-     .withFaceLandmarks()
-     .withFaceExpressions()
-     .withFaceDescriptors()
-     .withAgeAndGender();
-   return result;
- }
-
- async function main() {
-   log.header();
-   log.info('FaceAPI single-process test');
-
-   await faceapi.tf.setBackend('tensorflow');
-   await faceapi.tf.enableProdMode();
-   await faceapi.tf.ENV.set('DEBUG', false);
-   await faceapi.tf.ready();
-
-   log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf?.getBackend()}`);
-
-   log.info('Loading FaceAPI models');
-   const modelPath = path.join(__dirname, modelPathRoot);
-   await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
-   await faceapi.nets.ageGenderNet.loadFromDisk(modelPath);
-   await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
-   await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
-   await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
-   optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: minScore, maxResults });
-
-   if (process.argv.length !== 3) {
-     const t0 = process.hrtime.bigint();
-     const dir = fs.readdirSync(imgPathRoot);
-     for (const img of dir) {
-       if (!img.toLocaleLowerCase().endsWith('.jpg')) continue;
-       const tensor = await image(path.join(imgPathRoot, img));
-       const result = await detect(tensor);
-       log.data('Image:', img, 'Detected faces:', result.length);
-       tensor.dispose();
-     }
-     const t1 = process.hrtime.bigint();
-     log.info('Processed', dir.length, 'images in', Math.trunc(parseInt(t1 - t0) / 1000 / 1000), 'ms');
-   } else {
-     const param = process.argv[2];
-     if (fs.existsSync(param)) {
-       const tensor = await image(param);
-       const result = await detect(tensor);
-       log.data('Image:', param, 'Detected faces:', result.length);
-       for (const i of result) {
-         log.data('Gender:', i.genderProbability, i.gender, 'Age:', i.age);
-       }
-       tensor.dispose();
-     }
-   }
- }
-
- main();
@@ -1,15 +0,0 @@
- <!DOCTYPE html>
- <html lang="en">
-   <head>
-     <meta charset="utf-8">
-     <meta content="text/html">
-     <title>FaceAPI Live WebCam Demo</title>
-     <meta name="viewport" content="width=device-width, initial-scale=1.0, shrink-to-fit=yes">
-     <script src="./webcam.js" type="module"></script>
-   </head>
-   <body style="font-family: monospace; background: black; color: white; font-size: 16px; line-height: 22px; margin: 0; overflow: hidden">
-     <video id="video" playsinline class="video"></video>
-     <canvas id="canvas" class="canvas" style="position: fixed; top: 0; left: 0; z-index: 10"></canvas>
-     <div id="log" style="overflow-y: scroll; height: 16.5rem"></div>
-   </body>
- </html>
@@ -1 +1,39 @@
- [{"weights":[{"name":"dense0/conv0/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008194216092427571,"min":-0.9423348506291708}},{"name":"dense0/conv0/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006839508168837603,"min":-0.8412595047670252}},{"name":"dense0/conv1/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009194007106855804,"min":-1.2779669878529567}},{"name":"dense0/conv1/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0036026100317637128,"min":-0.3170296827952067}},{"name":"dense0/conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.000740380117706224,"min":-0.06367269012273527}},{"name":"dense0/conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":1,"min":0}},{"name":"dense0/conv2/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":1,"min":0}},{"name":"dense0/conv2/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0037702228508743585,"min":-0.6220867703942692}},{"name":"dense1/conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0033707996209462483,"min":-0.421349952618281}},{"name":"dense1/conv0/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014611541991140328,"min":-1.8556658328748217}},{"name":"dense1/conv0/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002832523046755323,"min":-0.30307996600281956}},{"name":"dense1/conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006593170586754294,"min":-0.6329443763284123}},{"name":"dense1/conv1/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.012215249211180444,"min":-1.6001976466646382}},{"name":"dense1/conv1/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002384825547536214,"min":-0.3028728445370992}},{"name":"dense1/conv2/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005859645441466687,"min":-0.7617539073906693}},{"name":"dense1/conv2/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013121426806730382,"min":-1.7845140457153321}},{"name":"dense1/conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0032247188044529336,"min":-0.46435950784122243}},{"name":"dense2/conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002659512618008782,"min":-0.32977956463308894}},{"name":"dense2/conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015499923743453681,"min":-1.9839902391620712}},{"name":"dense2/conv0/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0032450980999890497,"min":-0.522460794098237}},{"name":"dense2/conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005911862382701799,"min":-0.792189559282041}},{"name":"dense2/conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021025861478319356,"min":-2.2077154552235325}},{"name":"dense2/conv1/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00349616945958605,"min":-0.46149436866535865}},{"name":"dense2/conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008104994250278847,"min":-1.013124281284856}},{"name":"dense2/conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029337059282789044,"min":-3.5791212325002633}},{"name":"dense2/conv2/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0038808938334969913,"min":-0.4230174278511721}},{"name":"fc/weights","shape":[128,136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014016061670639936,"min":-1.8921683255363912}},{"name":"fc/bias","shape":[136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0029505149698724935,"min":0.088760145008564}}],"paths":["face_landmark_68_tiny_model-shard1"]}]
+ [
+   {
+     "weights":
+     [
+       {"name":"dense0/conv0/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008194216092427571,"min":-0.9423348506291708}},
+       {"name":"dense0/conv0/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006839508168837603,"min":-0.8412595047670252}},
+       {"name":"dense0/conv1/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009194007106855804,"min":-1.2779669878529567}},
+       {"name":"dense0/conv1/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0036026100317637128,"min":-0.3170296827952067}},
+       {"name":"dense0/conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.000740380117706224,"min":-0.06367269012273527}},
+       {"name":"dense0/conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":1,"min":0}},
+       {"name":"dense0/conv2/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":1,"min":0}},
+       {"name":"dense0/conv2/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0037702228508743585,"min":-0.6220867703942692}},
+       {"name":"dense1/conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0033707996209462483,"min":-0.421349952618281}},
+       {"name":"dense1/conv0/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014611541991140328,"min":-1.8556658328748217}},
+       {"name":"dense1/conv0/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002832523046755323,"min":-0.30307996600281956}},
+       {"name":"dense1/conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006593170586754294,"min":-0.6329443763284123}},
+       {"name":"dense1/conv1/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.012215249211180444,"min":-1.6001976466646382}},
+       {"name":"dense1/conv1/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002384825547536214,"min":-0.3028728445370992}},
+       {"name":"dense1/conv2/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005859645441466687,"min":-0.7617539073906693}},
+       {"name":"dense1/conv2/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013121426806730382,"min":-1.7845140457153321}},
+       {"name":"dense1/conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0032247188044529336,"min":-0.46435950784122243}},
+       {"name":"dense2/conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002659512618008782,"min":-0.32977956463308894}},
+       {"name":"dense2/conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015499923743453681,"min":-1.9839902391620712}},
+       {"name":"dense2/conv0/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0032450980999890497,"min":-0.522460794098237}},
+       {"name":"dense2/conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005911862382701799,"min":-0.792189559282041}},
+       {"name":"dense2/conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021025861478319356,"min":-2.2077154552235325}},
+       {"name":"dense2/conv1/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00349616945958605,"min":-0.46149436866535865}},
+       {"name":"dense2/conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008104994250278847,"min":-1.013124281284856}},
+       {"name":"dense2/conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029337059282789044,"min":-3.5791212325002633}},
+       {"name":"dense2/conv2/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0038808938334969913,"min":-0.4230174278511721}},
+       {"name":"fc/weights","shape":[128,136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014016061670639936,"min":-1.8921683255363912}},
+       {"name":"fc/bias","shape":[136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0029505149698724935,"min":0.088760145008564}}
+     ],
+     "paths":
+     [
+       "face_landmark_68_tiny_model.bin"
+     ]
+   }
+ ]
@@ -1 +1,30 @@
- [{"weights":[{"name":"conv0/filters","shape":[3,3,3,16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009007044399485869,"min":-1.2069439495311063}},{"name":"conv0/bias","shape":[16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005263455241334205,"min":-0.9211046672334858}},{"name":"conv1/depthwise_filter","shape":[3,3,16,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004001977630690033,"min":-0.5042491814669441}},{"name":"conv1/pointwise_filter","shape":[1,1,16,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013836609615999109,"min":-1.411334180831909}},{"name":"conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0015159862590771096,"min":-0.30926119685173037}},{"name":"conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002666276225856706,"min":-0.317286870876948}},{"name":"conv2/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015265831292844286,"min":-1.6792414422128714}},{"name":"conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0020280554598453,"min":-0.37113414915168985}},{"name":"conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006100742489683862,"min":-0.8907084034938438}},{"name":"conv3/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016276211832083907,"min":-2.0508026908425725}},{"name":"conv3/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003394414279975143,"min":-0.7637432129944072}},{"name":"conv4/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006716050119961009,"min":-0.8059260143953211}},{"name":"conv4/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021875603993733724,"min":-2.8875797271728514}},{"name":"conv4/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0041141652009066415,"min":-0.8187188749804216}},{"name":"conv5/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008423839597141042,"min":-0.9013508368940915}},{"name":"conv5/pointwise_filter","shape":[1,1,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.030007277283014035,"min":-3.8709387695088107}},{"name":"conv5/bias","shape":[512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008402082966823203,"min":-1.4871686851277068}},{"name":"conv8/filters","shape":[1,1,512,25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.028336129469030042,"min":-4.675461362389957}},{"name":"conv8/bias","shape":[25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002268134028303857,"min":-0.41053225912299807}}],"paths":["tiny_face_detector_model-shard1"]}]
+ [
+   {
+     "weights":
+     [
+       {"name":"conv0/filters","shape":[3,3,3,16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009007044399485869,"min":-1.2069439495311063}},
+       {"name":"conv0/bias","shape":[16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005263455241334205,"min":-0.9211046672334858}},
+       {"name":"conv1/depthwise_filter","shape":[3,3,16,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004001977630690033,"min":-0.5042491814669441}},
+       {"name":"conv1/pointwise_filter","shape":[1,1,16,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013836609615999109,"min":-1.411334180831909}},
+       {"name":"conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0015159862590771096,"min":-0.30926119685173037}},
+       {"name":"conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002666276225856706,"min":-0.317286870876948}},
+       {"name":"conv2/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015265831292844286,"min":-1.6792414422128714}},
+       {"name":"conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0020280554598453,"min":-0.37113414915168985}},
+       {"name":"conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006100742489683862,"min":-0.8907084034938438}},
+       {"name":"conv3/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016276211832083907,"min":-2.0508026908425725}},
+       {"name":"conv3/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003394414279975143,"min":-0.7637432129944072}},
+       {"name":"conv4/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006716050119961009,"min":-0.8059260143953211}},
+       {"name":"conv4/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021875603993733724,"min":-2.8875797271728514}},
+       {"name":"conv4/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0041141652009066415,"min":-0.8187188749804216}},
+       {"name":"conv5/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008423839597141042,"min":-0.9013508368940915}},
+       {"name":"conv5/pointwise_filter","shape":[1,1,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.030007277283014035,"min":-3.8709387695088107}},
+       {"name":"conv5/bias","shape":[512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008402082966823203,"min":-1.4871686851277068}},
+       {"name":"conv8/filters","shape":[1,1,512,25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.028336129469030042,"min":-4.675461362389957}},
+       {"name":"conv8/bias","shape":[25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002268134028303857,"min":-0.41053225912299807}}
+     ],
+     "paths":
+     [
+       "tiny_face_detector_model.bin"
+     ]
+   }
+ ]
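
Both manifests describe uint8-quantized float32 weights: each entry carries a scale and min, and a stored byte q decodes as float = scale * q + min, which is the standard tfjs weight-quantization scheme. A minimal decoding sketch (hypothetical helper, not part of the repo):

// Sketch: dequantize a uint8 weight buffer using a manifest entry.
// float value = scale * quantizedByte + min
function dequantize(bytes, { scale, min }) {
  const out = new Float32Array(bytes.length);
  for (let i = 0; i < bytes.length; i++) out[i] = scale * bytes[i] + min;
  return out;
}
// usage: dequantize(new Uint8Array([0, 128, 255]), { scale: 0.00819, min: -0.94 });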
package.json (86 changed lines)
@@ -1,65 +1,79 @@
{
  "name": "@vladmandic/face-api",
-  "version": "1.0.1",
+  "version": "1.7.15",
-  "description": "FaceAPI: AI-powered Face Detection, Face Embedding & Recognition Using Tensorflow/JS",
+  "description": "FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS",
  "sideEffects": false,
  "main": "dist/face-api.node.js",
  "module": "dist/face-api.esm.js",
  "browser": "dist/face-api.esm.js",
-  "types": "types/index.d.ts",
+  "types": "types/face-api.d.ts",
  "author": "Vladimir Mandic <mandic00@live.com>",
  "bugs": {
    "url": "https://github.com/vladmandic/face-api/issues"
  },
-  "homepage": "https://vladmandic.github.io/face-api/example/webcam.html",
+  "homepage": "https://vladmandic.github.io/face-api/demo/webcam.html",
  "license": "MIT",
  "engines": {
-    "node": ">=12.0.0"
+    "node": ">=14.0.0"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/vladmandic/face-api.git"
  },
  "scripts": {
-    "start": "node --trace-warnings example/node-singleprocess.js",
+    "start": "node --no-warnings demo/node.js",
-    "dev": "npm install && node server/serve.js",
+    "build": "node build.js",
-    "build": "rimraf dist/* types/* && node server/build.js && node server/changelog.js",
+    "dev": "build --profile development",
-    "lint": "eslint src/**/* example/*.js server/*.js",
+    "lint": "eslint src/ demo/",
-    "test": "eslint src/**/* example/*.js server/*.js"
+    "test": "node --trace-warnings test/test-node.js",
+    "scan": "npx auditjs@latest ossi --dev --quiet"
  },
  "keywords": [
-    "tensorflow",
-    "tf",
-    "tfjs",
-    "face",
    "face-api",
+    "faceapi",
    "face-detection",
    "age-gender",
    "emotion-detection",
-    "face-recognition"
+    "face-recognition",
+    "face",
+    "face-description",
+    "tensorflow",
+    "tensorflowjs",
+    "tfjs"
  ],
-  "peerDependencies": {},
  "devDependencies": {
-    "@tensorflow/tfjs": "^3.2.0",
-    "@tensorflow/tfjs-backend-wasm": "^3.2.0",
-    "@tensorflow/tfjs-node": "^3.2.0",
-    "@tensorflow/tfjs-node-gpu": "^3.2.0",
-    "@types/node": "^14.14.33",
-    "@typescript-eslint/eslint-plugin": "^4.17.0",
-    "@typescript-eslint/parser": "^4.17.0",
-    "@vladmandic/pilogger": "^0.2.14",
-    "chokidar": "^3.5.1",
-    "dayjs": "^1.10.4",
-    "esbuild": "^0.9.0",
-    "eslint": "^7.21.0",
-    "eslint-config-airbnb-base": "^14.2.1",
-    "eslint-plugin-import": "^2.22.1",
-    "eslint-plugin-json": "^2.1.2",
+    "@canvas/image": "^2.0.0",
+    "@microsoft/api-extractor": "^7.49.2",
+    "@tensorflow/tfjs": "^4.22.0",
+    "@tensorflow/tfjs-backend-cpu": "^4.22.0",
+    "@tensorflow/tfjs-backend-wasm": "^4.22.0",
+    "@tensorflow/tfjs-backend-webgl": "^4.22.0",
+    "@tensorflow/tfjs-backend-webgpu": "4.22.0",
+    "@tensorflow/tfjs-converter": "^4.22.0",
+    "@tensorflow/tfjs-core": "^4.22.0",
+    "@tensorflow/tfjs-data": "^4.22.0",
+    "@tensorflow/tfjs-layers": "^4.22.0",
+    "@tensorflow/tfjs-node": "^4.22.0",
+    "@tensorflow/tfjs-node-gpu": "^4.22.0",
+    "@types/node": "^22.13.1",
+    "@types/offscreencanvas": "^2019.7.3",
+    "@typescript-eslint/eslint-plugin": "^8.5.0",
+    "@typescript-eslint/parser": "^8.5.0",
+    "@vladmandic/build": "^0.10.2",
+    "@vladmandic/pilogger": "^0.5.1",
+    "ajv": "^8.17.1",
+    "esbuild": "^0.24.2",
+    "eslint": "8.57.0",
+    "eslint-config-airbnb-base": "^15.0.0",
+    "eslint-plugin-import": "^2.30.0",
+    "eslint-plugin-json": "^4.0.1",
    "eslint-plugin-node": "^11.1.0",
-    "eslint-plugin-promise": "^4.3.1",
+    "eslint-plugin-promise": "^7.1.0",
+    "node-fetch": "^3.3.2",
-    "rimraf": "^3.0.2",
+    "rimraf": "^6.0.1",
-    "simple-git": "^2.36.1",
+    "seedrandom": "^3.0.5",
-    "tslib": "^2.1.0",
+    "tslib": "^2.8.1",
+    "typedoc": "^0.27.6",
-    "typescript": "^4.2.3"
+    "typescript": "5.7.3"
  }
}
server/build.js (236 changed lines)
@@ -1,236 +0,0 @@
- #!/usr/bin/env -S node --trace-warnings
-
- /* eslint-disable import/no-extraneous-dependencies */
- /* eslint-disable node/no-unpublished-require */
- /* eslint-disable node/shebang */
-
- const fs = require('fs');
- const esbuild = require('esbuild');
- const ts = require('typescript');
- const log = require('@vladmandic/pilogger');
-
- // keeps esbuild service instance cached
- let es;
- const banner = `
- /*
-   Face-API
-   homepage: <https://github.com/vladmandic/face-api>
-   author: <https://github.com/vladmandic>'
- */
- `;
-
- // tsc configuration
- const tsconfig = {
-   noEmitOnError: false,
-   target: ts.ScriptTarget.ES2018,
-   module: ts.ModuleKind.ES2020,
-   // outFile: "dist/face-api.d.ts",
-   outDir: 'types/',
-   declaration: true,
-   emitDeclarationOnly: true,
-   emitDecoratorMetadata: true,
-   experimentalDecorators: true,
-   skipLibCheck: true,
-   strictNullChecks: true,
-   baseUrl: './',
-   paths: {
-     tslib: ['node_modules/tslib/tslib.d.ts'],
-   },
- };
-
- // common configuration
- const common = {
-   banner,
-   minifyWhitespace: true,
-   minifyIdentifiers: true,
-   minifySyntax: true,
-   bundle: true,
-   sourcemap: true,
-   logLevel: 'error',
-   target: 'es2018',
-   // tsconfig: './tsconfig.json',
- };
-
- const targets = {
-   node: {
-     tfjs: {
-       platform: 'node',
-       format: 'cjs',
-       metafile: 'dist/tfjs.esm.json',
-       entryPoints: ['src/tfjs/tf-node.ts'],
-       outfile: 'dist/tfjs.esm.js',
-       external: ['@tensorflow'],
-     },
-     node: {
-       platform: 'node',
-       format: 'cjs',
-       metafile: 'dist/face-api.node.json',
-       entryPoints: ['src/index.ts'],
-       outfile: 'dist/face-api.node.js',
-       external: ['@tensorflow'],
-     },
-   },
-   nodeGPU: {
-     tfjs: {
-       platform: 'node',
-       format: 'cjs',
-       entryPoints: ['src/tfjs/tf-node-gpu.ts'],
-       outfile: 'dist/tfjs.esm.js',
-       metafile: 'dist/tfjs.esm.json',
-       external: ['@tensorflow'],
-     },
-     node: {
-       platform: 'node',
-       format: 'cjs',
-       entryPoints: ['src/index.ts'],
-       outfile: 'dist/face-api.node-gpu.js',
-       metafile: 'dist/face-api.node-gpu.json',
-       external: ['@tensorflow'],
-     },
-   },
-   nodeCPU: {
-     tfjs: {
-       platform: 'node',
-       format: 'cjs',
-       metafile: 'dist/tfjs.esm.json',
-       entryPoints: ['src/tfjs/tf-node-cpu.ts'],
-       outfile: 'dist/tfjs.esm.js',
-       external: ['@tensorflow'],
-     },
-     node: {
-       platform: 'node',
-       format: 'cjs',
-       metafile: 'dist/face-api.node-cpu.json',
-       entryPoints: ['src/index.ts'],
-       outfile: 'dist/face-api.node-cpu.js',
-       external: ['@tensorflow'],
-     },
-   },
-   browserNoBundle: {
-     tfjs: {
-       platform: 'browser',
-       format: 'esm',
-       entryPoints: ['src/tfjs/tf-browser.ts'],
-       outfile: 'dist/tfjs.esm.js',
-       metafile: 'dist/tfjs.esm.json',
-       external: ['fs', 'buffer', 'util', 'os', '@tensorflow'],
-     },
-     esm: {
-       platform: 'browser',
-       format: 'esm',
-       entryPoints: ['src/index.ts'],
-       outfile: 'dist/face-api.esm-nobundle.js',
-       metafile: 'dist/face-api.esm-nobundle.json',
-       external: ['fs', 'buffer', 'util', 'os', '@tensorflow', 'tfjs.esm.js'],
-     },
-   },
-   browserBundle: {
-     tfjs: {
-       platform: 'browser',
-       format: 'esm',
-       entryPoints: ['src/tfjs/tf-browser.ts'],
-       outfile: 'dist/tfjs.esm.js',
-       metafile: 'dist/tfjs.esm.json',
-       external: ['fs', 'buffer', 'util', 'os'],
-     },
-     iife: {
-       platform: 'browser',
-       format: 'iife',
-       globalName: 'faceapi',
-       entryPoints: ['src/index.ts'],
-       outfile: 'dist/face-api.js',
-       metafile: 'dist/face-api.json',
-       external: ['fs', 'buffer', 'util', 'os'],
-     },
-     esm: {
-       platform: 'browser',
-       format: 'esm',
-       entryPoints: ['src/index.ts'],
-       outfile: 'dist/face-api.esm.js',
-       metafile: 'dist/face-api.esm.json',
-       external: ['fs', 'buffer', 'util', 'os'],
-     },
-   },
- };
-
- async function getStats(metafile) {
-   const stats = {};
-   if (!fs.existsSync(metafile)) return stats;
-   const data = fs.readFileSync(metafile);
-   const json = JSON.parse(data.toString());
-   if (json && json.inputs && json.outputs) {
-     for (const [key, val] of Object.entries(json.inputs)) {
-       if (key.startsWith('node_modules')) {
-         stats.modules = (stats.modules || 0) + 1;
-         stats.moduleBytes = (stats.moduleBytes || 0) + val.bytes;
-       } else {
-         stats.imports = (stats.imports || 0) + 1;
-         stats.importBytes = (stats.importBytes || 0) + val.bytes;
-       }
-     }
-     const files = [];
-     for (const [key, val] of Object.entries(json.outputs)) {
-       if (!key.endsWith('.map')) {
-         files.push(key);
-         stats.outputBytes = (stats.outputBytes || 0) + val.bytes;
-       }
-     }
-     stats.outputFiles = files.join(', ');
-   }
-   return stats;
- }
-
- function compile(fileNames, options) {
-   log.info('Compile typings:', fileNames);
-   const program = ts.createProgram(fileNames, options);
-   const emit = program.emit();
-   const diag = ts
-     .getPreEmitDiagnostics(program)
-     .concat(emit.diagnostics);
-   for (const info of diag) {
-     // @ts-ignore
-     const msg = info.messageText.messageText || info.messageText;
-     if (msg.includes('package.json')) continue;
-     if (msg.includes('Expected 0 arguments, but got 1')) continue;
-     if (info.file) {
-       const pos = info.file.getLineAndCharacterOfPosition(info.start || 0);
-       log.error(`TSC: ${info.file.fileName} [${pos.line + 1},${pos.character + 1}]:`, msg);
-     } else {
-       log.error('TSC:', msg);
-     }
-   }
- }
-
- // rebuild on file change
- async function build(f, msg) {
-   log.info('Build: file', msg, f, 'target:', common.target);
-   if (!es) es = await esbuild.startService();
-   try {
-     // rebuild all target groups and types
-     for (const [targetGroupName, targetGroup] of Object.entries(targets)) {
-       for (const [targetName, targetOptions] of Object.entries(targetGroup)) {
-         // if triggered from watch mode, rebuild only browser bundle
-         if ((require.main !== module) && (targetGroupName !== 'browserBundle')) continue;
-         await es.build({ ...common, ...targetOptions });
-         const stats = await getStats(targetOptions.metafile);
-         log.state(`Build for: ${targetGroupName} type: ${targetName}:`, stats);
-       }
-     }
-   } catch (err) {
-     // catch errors and print where it occured
-     log.error('Build error', JSON.stringify(err.errors || err, null, 2));
-     if (require.main === module) process.exit(1);
-   }
-
-   // generate typings
-   compile(targets.browserBundle.esm.entryPoints, tsconfig);
-
-   if (require.main === module) process.exit(0);
- }
-
- if (require.main === module) {
-   log.header();
-   build('all', 'startup');
- } else {
-   exports.build = build;
- }
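
The removed build script dates from esbuild's service-based API (`esbuild.startService()`), which esbuild later dropped in favor of calling `esbuild.build()` directly; a minimal sketch of the equivalent modern call for one of the targets above (options abbreviated):

// Sketch: one build target with the current esbuild API.
const esbuild = require('esbuild');
esbuild.build({
  entryPoints: ['src/index.ts'],
  outfile: 'dist/face-api.esm.js',
  bundle: true,
  format: 'esm',
  platform: 'browser',
  sourcemap: true,
}).then(() => console.log('build done'));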
@@ -1,59 +0,0 @@
- const fs = require('fs');
- const path = require('path');
- const dayjs = require('dayjs');
- const simpleGit = require('simple-git/promise');
- const logger = require('@vladmandic/pilogger');
- const app = require('../package.json');
-
- const git = simpleGit();
-
- let text = `# ${app.name}
-
- Version: **${app.version}**
- Description: **${app.description}**
-
- Author: **${app.author}**
- License: **${app.license}** </LICENSE>
- Repository: **<${app.repository.url}>**
-
- ## Changelog
- `;
-
- async function update(f) {
-   const gitLog = await git.log();
-   // @ts-ignore
-   const log = gitLog.all.sort((a, b) => (new Date(b.date).getTime() - new Date(a.date).getTime()));
-
-   let previous = '';
-   const headings = [];
-   for (const l of log) {
-     const msg = l.message.toLowerCase();
-     if ((l.refs !== '') || msg.match(/^[0-99].[0-99].[0-99]/)) {
-       const dt = dayjs(l.date).format('YYYY/MM/DD');
-       let ver = msg.match(/[0-99].[0-99].[0-99]/) ? msg : l.refs;
-       ver = ver.replace('tag: v', '').replace('tag: ', 'release: ').split(',')[0];
-       const heading = `\n### **${ver}** ${dt} ${l.author_email}\n\n`;
-       if (!headings.includes(heading) && !ver.startsWith('tag')) {
-         headings.push(heading);
-         text += heading;
-       }
-     } else if ((msg.length > 2) && !msg.startsWith('update') && (previous !== msg)) {
-       previous = msg;
-       text += `- ${msg}\n`;
-     }
-   }
-
-   const name = path.join(__dirname, f);
-   fs.writeFileSync(name, text);
-   logger.state('Change log updated:', name);
- }
-
- exports.update = update;
-
- try {
-   if (require.main === module) {
-     update('../CHANGELOG.md');
-   }
- } catch {
-   //
- }
@@ -1,31 +0,0 @@
- -----BEGIN CERTIFICATE-----
- MIIFazCCA1OgAwIBAgIUKQKodDBJnuweJs5IcTyL4NIp3vgwDQYJKoZIhvcNAQEL
- BQAwRTELMAkGA1UEBhMCVVMxEDAOBgNVBAgMB0Zsb3JpZGExDjAMBgNVBAcMBU1p
- YW1pMRQwEgYDVQQKDAtAdmxhZG1hbmRpYzAeFw0yMDExMDcxNTE3NDNaFw0yMTEx
- MDcxNTE3NDNaMEUxCzAJBgNVBAYTAlVTMRAwDgYDVQQIDAdGbG9yaWRhMQ4wDAYD
- VQQHDAVNaWFtaTEUMBIGA1UECgwLQHZsYWRtYW5kaWMwggIiMA0GCSqGSIb3DQEB
- AQUAA4ICDwAwggIKAoICAQDSC88PF8NyLkagK5mAZ/d739SOU16l2Cx3zE35zZQh
- O29+1L4L+oMksLYipo+FMgtGO+MSzFsvGgKCs2sDSdfyoNSTZ3QaN4BAZ0sbq+wL
- cke7yRBTM/XIGOQfhqq8yC2q8/zXwUbZg0UsCAxDGNwUr0Qlm829laIU/UN1KcYS
- 57Nebl1z05wMEvYmyl4JBAl9ozne7KS9DyW7jbrAXE8TaEy3+pY66kx5GG6v2+up
- ScITGm4YPmPPlpOF1UjQloosgxdVa+fVp8aNCa/rf0JNO0Uhb3OKOZ+4kYmpfPn/
- trwoKWAa6CV1uAJ+3zDkLMq1JNlrV4OMp1QvX0wzA47a/n466JMN9SFb0Ng5wf19
- VOtT5Zu7chDStBudVjxlMDfUixvhvn4sjbaLNYR1fyWPoNXwr0KX2lpTP1QOzp9/
- Sd0iiJ8RPfXn8Xo26MStu4I52CZjS7yEMgJGCLH/mgPuSbrHHYYrrrCPJgmQOZG2
- TNMI+EqOwQvHh2ghdv7t7EEk4IslBk0QzufMXQ2WFXQ20nvj74mrmmiMuBcmonpR
- 0egA5/M18ZPLQxYu0Q86NUr4XHtAG1fq+n8pseQ7Avy6Gk6HRiezCbB7TJ9rnNeu
- jie1TDajC6W7rx0VF7hcxkIrDgNgnYcjXUV2hMx1lo4fIoWkL3nJJVEthMVIcJOX
- EwIDAQABo1MwUTAdBgNVHQ4EFgQUHawIRAo1bW8Xy7l4oKfM+ESjhs0wHwYDVR0j
- BBgwFoAUHawIRAo1bW8Xy7l4oKfM+ESjhs0wDwYDVR0TAQH/BAUwAwEB/zANBgkq
- hkiG9w0BAQsFAAOCAgEAozQJk5Ahx7rDn/aMXLdZFxR81VfkmHDm7NhlJsdVKUx5
- o/iegXnvwc1PoeKsz2S504QiuL8l7jqZoU2WPIm7Vlr+oxBgiKqjo1EqBsUgNCZ7
- qxMD84TVp/KBGjKUh1TXhjJwGGfNNr+R/fJGw+36UeuY3fSckjaYTuNuVElp+DoZ
- /pGyu1qpcybLfiR8mpQkCeU/iBq5gIjWddbVjlYoTKfqULZrpsAF2AeqELEgyshl
- p3PNhW/54TJSn4mWK+39BibYHPkvx8orEuWKyjjRk82hEXi7J3hsGKX29qC3oO40
- 67DKDWmZdMCz+E1ERf10V0bSp6iJnnlwknHJloZUETV1NY/DdoSC6e8CN0+0cQqL
- aJefJ483O3sXyN3v3+DaEFBLPFgRFGZB7eaBwR2xAv/KfjT5dSyi+wA4LZAxsQMC
- Q7UYGNAfHLNHJo/bsj12+JDhJaFZ/KoBKzyMUuEXmvjxXNDMCfm+gVQFoLyXkGq3
- 491W/O7LjR6pkD+ce0qeTFMu3nfUubyfbONVDEfuH4GC1e+FAggCRaBnFsVzCzXj
- jxOOLoQ9nwLk8v17mx0BSwX4iuqvXFntfJbzfcnzQfx/qqPFheIbGnmKw1lrRML8
- 87ZbN6t01+v2YyYe6Mc7p80s1R3jc8aVX8ca2KcYwsJAkg/xz0q5RJwsE1is5UY=
- -----END CERTIFICATE-----
@@ -1,52 +0,0 @@
- -----BEGIN PRIVATE KEY-----
- MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQDSC88PF8NyLkag
- K5mAZ/d739SOU16l2Cx3zE35zZQhO29+1L4L+oMksLYipo+FMgtGO+MSzFsvGgKC
- s2sDSdfyoNSTZ3QaN4BAZ0sbq+wLcke7yRBTM/XIGOQfhqq8yC2q8/zXwUbZg0Us
- CAxDGNwUr0Qlm829laIU/UN1KcYS57Nebl1z05wMEvYmyl4JBAl9ozne7KS9DyW7
- jbrAXE8TaEy3+pY66kx5GG6v2+upScITGm4YPmPPlpOF1UjQloosgxdVa+fVp8aN
- Ca/rf0JNO0Uhb3OKOZ+4kYmpfPn/trwoKWAa6CV1uAJ+3zDkLMq1JNlrV4OMp1Qv
- X0wzA47a/n466JMN9SFb0Ng5wf19VOtT5Zu7chDStBudVjxlMDfUixvhvn4sjbaL
- NYR1fyWPoNXwr0KX2lpTP1QOzp9/Sd0iiJ8RPfXn8Xo26MStu4I52CZjS7yEMgJG
- CLH/mgPuSbrHHYYrrrCPJgmQOZG2TNMI+EqOwQvHh2ghdv7t7EEk4IslBk0QzufM
- XQ2WFXQ20nvj74mrmmiMuBcmonpR0egA5/M18ZPLQxYu0Q86NUr4XHtAG1fq+n8p
- seQ7Avy6Gk6HRiezCbB7TJ9rnNeujie1TDajC6W7rx0VF7hcxkIrDgNgnYcjXUV2
- hMx1lo4fIoWkL3nJJVEthMVIcJOXEwIDAQABAoICAF45S+ZSW6uh1K7PQCnY+a0J
- CJncDk5JPhFzhds0fGm39tknaCWJeEECQIIkw6cVfvc/sCpjn9fuTAgDolK0UnoV
- 6aZCN1P3Z8H8VDYSlm3AEyvLE1avrWbYu6TkzTyoc8wHbXn/yt+SQnpxFccXpMpm
- oSRZ0x5jvHS79AHf/mnGpLEMw0FNQOgtrVxTVYGn3PYOPcyhzXi+Dcgn2QmnnxVu
- qVOyxqehKTL9YdHjzsB/RN868P5RJocd3gmgVuyzS0KSf+oi4Ln4bFoiaVc0HDL3
- DpjkHSl5lgu+xclRNfifKaK+hM0tLHi1VfFB//WrnjdKU3oSpQF4oowprM4Jn5AP
- jhRI54JWZlWnvbiAOx7D49xFga3EnqjVH6So2gxi+q3Dv25luXGAnueaBPDpVC6c
- nkJm2aCl7T3xlVpW8O5Fs+rsP8Xr9RTyEQJauM01uOi3N2zEeO8ERxTYEW5Sy2U7
- OFKRXtLj7Jnejib/SxWGcIX4Wid5QFAygbXz4APfFN22QU0fqmhm4/c2OB/xM8qr
- VVFx4xlG2wnuq5CZdZjmK3MTbmSM+pWW8mly/+++p694cf5oXGenYus/JWFNwxj/
- fPyA7zQmaTOidu6clDHzkPCOE7TBv9TkQ7lL6ClgE7B39JR65ZQtjCYqRsADKsGI
- dFMg+HDmGbVEfWg2V0GBAoIBAQDupImrJ0JXHA/0SEC2Tbz7pE60fRwmBFdhvk4Z
- rzZiaOl+M2HXQU6b5DYhKcgdiFah5IuAnsRPo6X5Ug+Q1DV3OFTuEGAkXgqZliNa
- aXsJcc0++DYlXX3BrTb66gylVLQRs5tZzsXps5iXWclziDC2go8RKnCwxsxwbzVq
- FP4hoBP4dp83WoLF4NznnGFGw3/KLlMivtRxDE5OegpxTuWGlA/bVtT187Ksuuz3
- dFUayLfpg0ABS/E7wwAJjSUpPPEi3J/G255H3lZXgS1gWcAf3rGDQYlJKF8UHdja
- yWQcAOF+b/bYEpa4lHw+UtKNNkPTiCV4Y7CNQd8a2Gcl7VFTAoIBAQDhUs9r1dhm
- rUlNAunVZZZVZ91XhXeqVTa/9xUDEvDh91nB5c7CcuNXxwcX4oTsMF4Bc7CHlvOv
- pybp+QLjK310VjxxkFYJT0TKWuYqLjtNkQ93sp8wF3gVCf8m8bMOX/gPfQzNZWKp
- un+ZWnzXNU5d2A+63xbZmFzT0Zo6H/h9YEO5Xxw32HCKFzEhl5JD34muZTEXSpdD
- p7LUUr5LvnoUqEzonhXx2qRnTLP87d1o0GlkVex9HeeeBgrvm57QYoJnABxw9UFM
- /ocLeYsjkmqJQRBDWgiwQlos1pdZyX2Yj20b7Wm5Pxd4aM9gh5EZZMXeQHhbHlWz
- UY1IPxfAkytBAoIBAHmYavFDisD58oMlAZwiViXeXaAHk30nfyK1pfPeXBaeoEKG
- idb1VsmF6bLSKD4sBwBshExgGWT+3IYCMx43kpqRoGzA+UvugvYpExBxaJiyXMM2
- E9jMH1S9HqOQ+CqR00KlwoVrH1rqANk1jbkJbtDAC4fSmSLp2Kd9crj/w1F80FAs
- mQnKW5HZ9pUpEEPPP2DUY9XzaCnF/GxuML31VmxRKxc20kIUDzmF8VJQ+0Avf85C
- 6yz99gfeXzl+qq2teKyrv9nCc47pEhN6JZXPhV53yPk5PmuBX5jPcHxiW1kNddhH
- 0n3cUuHv/rJ+3vvG555z46vJF9+R7c0u8LfZiTMCggEBAMQd4a/IN0xXM1+2U3SL
- sSew+XR+FMPK25aGJmHAkKz9L8CWlzmj6cCy2LevT2aMSqYU3eeGOZ//at1nAV5c
- shsaHA30RQ5hUkyWhZLdHnzK752NeQTQyJH3W3+4C9NNMIm6m/QCdLeqPflqSxK9
- sPH5ZueN2UOXW+R5oTVKMmxd51RnNhZdasamnPrSBFrTK/EA3pOZNsOKKRqo0jz3
- Eyb7vcUSI6OYXFQU7OwO1RGvpKvSJb5Y0wo11DrtRnO16i5gaGDg9u9e8ofISJSz
- kcrZOKCGst1HQ1mXhbB+sbSh0aPnJog4I+OHxkgMdvyVO6vQjXExnAIxzzi8wZ25
- +oECggEBAIT6q/sn8xFt5Jwc/0Z7YUjd415Nknam09tnbB+UPRR6lt6JFoILx8by
- 5Y1sN30HWDv27v9G32oZhUDii3Rt3PkbYLqlHy7XBMEXA9WIUo+3Be7mtdL8Wfrj
- 0zn0b7Hks9a9KsElG1dXUopwjMRL3M22UamaN7e/gl5jz2I7pyc5oaqz9GRDV5yG
- slb6gGZ5naMycJD3p8vutXbmgKRr9beRp55UICAbEMdr5p3ks8bfR33Z6t+a97u1
- IxI5x5Lb0fdfvL8JK3nRWn7Uzbmm5Ni/OaODNKP+fIm9m2yDAs8LM8RGpPtk6i0d
- qIRta3H9KNw2Mhpkm77TtUSV/W5aOmY=
- -----END PRIVATE KEY-----