Compare commits

..

149 Commits

Author SHA1 Message Date
Vladimir Mandic 189226d63a full rebuild
Signed-off-by: Vladimir Mandic <mandic00@live.com>
2025-02-05 09:15:34 -05:00
Vladimir Mandic f587b44f66 1.7.15 2025-02-05 09:02:09 -05:00
Vladimir Mandic e3f11b8533 update build platform
Signed-off-by: Vladimir Mandic <mandic00@live.com>
2025-02-05 09:02:06 -05:00
Vladimir Mandic 171d17cadf update changelog 2024-09-10 11:31:01 -04:00
Vladimir Mandic e4cdf624c9 update build environment and full rebuild 2024-09-10 11:30:23 -04:00
Vladimir Mandic c633f9fbe4 1.7.14 2024-09-10 11:17:44 -04:00
Vladimir Mandic ffc3c40362 rebuild 2024-01-20 15:46:59 -05:00
Vladimir Mandic a8193f9077
Merge pull request #188 from rebser/master
fixing leaking EventHandlers when using HTMLCanvasElement
2024-01-20 15:45:04 -05:00
rebser 155f07dccd
fixing leaking EventHandlers when using HTMLCanvasElement 2024-01-19 08:38:59 +01:00
Vladimir Mandic 2f0469fe6e update readme 2024-01-17 17:04:22 -05:00
Vladimir Mandic 697b265337 rebuild types 2024-01-17 17:01:20 -05:00
Vladimir Mandic 4719b81587 rebuild 2024-01-17 16:56:53 -05:00
Vladimir Mandic fc9a39ea13 1.7.13 2024-01-17 16:44:28 -05:00
Vladimir Mandic 438897c5a2 update all dependencies 2024-01-17 16:44:24 -05:00
Vladimir Mandic f4d4780267
Merge pull request #186 from khwalkowicz/master
feat: enable noImplicitAny
2024-01-17 16:06:03 -05:00
Kamil H. Walkowicz a5c767fdff feat: enable noImplicitAny 2024-01-16 18:09:52 +01:00
Vladimir Mandic 1fa29b0fd3 update tfjs and rebuild 2023-06-12 12:02:21 -04:00
Vladimir Mandic 472f2e4480 1.7.12 2023-06-12 12:01:45 -04:00
Vladimir Mandic 4433ce44bc update dependencies 2023-05-08 09:08:30 -04:00
Vladimir Mandic 4ca829f941 1.7.11 2023-05-08 09:08:05 -04:00
Vladimir Mandic 038349968c update tfjs 2023-03-21 08:00:18 -04:00
Vladimir Mandic ae96c7b230 1.7.10 2023-03-21 07:59:27 -04:00
Vladimir Mandic f9f036ba01 change typedefs 2023-01-29 10:08:46 -05:00
Vladimir Mandic 0736a99250 1.7.9 2023-01-29 09:00:29 -05:00
Vladimir Mandic 3ea729badb update dependencies 2023-01-21 09:06:35 -05:00
Vladimir Mandic d36ed6d266 update changelog 2023-01-06 13:25:52 -05:00
Vladimir Mandic 4061d4d62f update tfjs 2023-01-06 13:24:17 -05:00
Vladimir Mandic b034c46f80 1.7.8 2023-01-06 13:04:31 -05:00
Vladimir Mandic aefd776a9e update dependencies 2022-12-21 14:14:22 -05:00
Vladimir Mandic 20eb54beb4 update 2022-12-04 14:14:05 -05:00
Vladimir Mandic e8301c5277 update 2022-12-04 13:23:41 -05:00
Vladimir Mandic fba823ba50 update tfjs 2022-12-01 14:56:40 -05:00
Vladimir Mandic a1cb6de1e8 1.7.7 2022-12-01 14:55:47 -05:00
Vladimir Mandic fb3836019f update dependencies 2022-11-12 11:54:00 -05:00
Vladimir Mandic 15ae496f40 update release 2022-10-18 07:23:49 -04:00
Vladimir Mandic 0009d1bc34 1.7.6 2022-10-18 07:23:04 -04:00
Vladimir Mandic adc4b3a11d update dependencies 2022-10-18 07:10:40 -04:00
Sohaib Ahmed 7e5a1289ff
Fix face angles (yaw, pitch, & roll) accuracy (#130)
Previously derived aforementioned angles seemed inaccurate and somewhat unusable (given their output was in radians). This update uses a person's mesh positions, and chooses specific points for accurate results. It also adds directionality of the movements (e.g. pitching head backwards is a negative result, as is rolling head to the left).

The webcam.js file has also been updated to showcase the correct output in degrees (reducing potential user confusion)

Committer: Sohaib Ahmed <sohaibi.ahmed@icloud.com>

Co-authored-by: Sophia Glisch <sophiaglisch@Sophias-MacBook-Pro.local>
2022-10-18 07:09:35 -04:00
Vladimir Mandic cd2c553737 update tfjs 2022-10-14 08:01:39 -04:00
Vladimir Mandic a433fc0681 1.7.5 2022-10-09 13:42:45 -04:00
Vladimir Mandic f9902b0459 update readme 2022-10-09 13:42:38 -04:00
Vladimir Mandic bd5ab6bb0f update 2022-10-09 13:41:11 -04:00
Vladimir Mandic 96fed4f123 update tfjs 2022-10-09 13:40:33 -04:00
Vladimir Mandic 0cbfd9b01b update dependencies 2022-09-29 10:38:14 -04:00
Vladimir Mandic dea225bbeb
Create FUNDING.yml 2022-09-26 09:39:08 -04:00
Vladimir Mandic 602e86cbec add node-wasm demo 2022-09-25 16:40:42 -04:00
Vladimir Mandic 00bf49b24f 1.7.4 2022-09-25 16:39:22 -04:00
Vladimir Mandic fa33c1281c improve face compare performance 2022-09-14 08:18:51 -04:00
Vladimir Mandic 7f613367a3 update tfjs and typescript 2022-09-04 15:18:07 -04:00
Vladimir Mandic 4d65f459f9 update tfjs 2022-08-24 08:21:15 -04:00
Vladimir Mandic d28e5d2142 1.7.3 2022-08-24 08:20:11 -04:00
Vladimir Mandic 6aeb292453 refresh release 2022-08-23 08:26:07 -04:00
Vladimir Mandic 289faf17f2 1.7.2 2022-08-23 08:25:42 -04:00
Vladimir Mandic 7a6f7d96b7 document and remove optional dependencies 2022-08-23 08:21:20 -04:00
Vladimir Mandic 870eebedfa update dependencies 2022-08-22 13:17:39 -04:00
Vladimir Mandic 1ed702f713 update readme 2022-08-16 20:25:26 -04:00
Nina Egger b2a988e436
update readme 2022-08-03 15:14:56 -04:00
Vladimir Mandic 5c38676a83 update build platform 2022-07-29 09:24:51 -04:00
Vladimir Mandic bac0ef10cf update readme 2022-07-26 07:27:52 -04:00
Vladimir Mandic 8baef0ef68 update links 2022-07-25 08:38:52 -04:00
Vladimir Mandic c5dbb9d4e9 release build 2022-07-25 08:23:57 -04:00
Vladimir Mandic a8021dc2a3 1.7.1 2022-07-25 08:21:02 -04:00
Vladimir Mandic f946780bab refactor dependencies 2022-07-25 08:20:59 -04:00
Vladimir Mandic 8e7061a9aa full rebuild 2022-05-24 07:18:59 -04:00
Vladimir Mandic cd904ca5dd 1.6.11 2022-05-24 07:18:51 -04:00
Vladimir Mandic 496779fee2 1.6.10 2022-05-24 07:17:40 -04:00
Vladimir Mandic 4ba4a99ee1 update tfjs 2022-05-24 07:16:42 -04:00
Vladimir Mandic 31170e750b update changelog 2022-05-18 08:36:24 -04:00
Vladimir Mandic 5f58cd376d update tfjs 2022-05-18 08:36:05 -04:00
Vladimir Mandic 07eb00d7d6 1.6.9 2022-05-18 08:21:59 -04:00
Vladimir Mandic a1f7a0841f update libraries 2022-05-09 08:12:24 -04:00
Vladimir Mandic 49a594a59b 1.6.8 2022-05-09 08:11:31 -04:00
Vladimir Mandic 3b3ab219dc update dependencies 2022-04-09 09:48:06 -04:00
Vladimir Mandic 2fce7338dc exclude impossible detected face boxes 2022-04-05 07:38:11 -04:00
Vladimir Mandic 6cafeafba1 update tfjs 2022-04-01 09:16:17 -04:00
Vladimir Mandic d0f1349a23 1.6.7 2022-04-01 09:15:45 -04:00
abdemirza cdb0e485f8
fixed typo error (#97)
Co-authored-by: Abuzar Mirza <abdermiza@gmail.com>
2022-03-10 06:48:14 -05:00
Vladimir Mandic 5bcc4d2a73 update changelog 2022-03-07 13:17:54 -05:00
Vladimir Mandic 92008ed6f4 update tfjs and ts 2022-03-07 13:17:31 -05:00
Vladimir Mandic c1b38f99fe 1.6.6 2022-03-04 16:48:47 -05:00
Vladimir Mandic 0c5251c219 toolkit refresh 2022-02-07 09:43:35 -05:00
Vladimir Mandic fcf61e5c30 1.6.5 2022-02-07 09:41:55 -05:00
Vladimir Mandic 8c7e21b1c9 update tfjs and expand readme 2022-01-14 10:04:13 -05:00
Vladimir Mandic 2841969df8 1.6.4 2022-01-14 09:54:19 -05:00
Vladimir Mandic 39b137ed63 add node with wasm build target 2022-01-06 07:59:13 -05:00
Vladimir Mandic c53becfc67 1.6.3 2022-01-06 07:58:05 -05:00
Vladimir Mandic fd427cce39 update lint 2022-01-01 07:55:12 -05:00
Vladimir Mandic 43805b50c6 update demos 2022-01-01 07:52:40 -05:00
Vladimir Mandic fc18d89ab6 1.6.2 2022-01-01 07:51:51 -05:00
Vladimir Mandic 0de113080c update 2021-12-27 10:52:58 -05:00
Vladimir Mandic 471ddb7549 update 2021-12-14 15:42:06 -05:00
Vladimir Mandic 70991235df update tfjs 2021-12-09 14:22:22 -05:00
Vladimir Mandic c07be32e26 1.6.1 2021-12-09 14:20:24 -05:00
Vladimir Mandic 936ecba7ec update build 2021-12-06 21:43:06 -05:00
Vladimir Mandic 63476fcbc0 rebuild 2021-12-06 06:34:50 -05:00
Vladimir Mandic 62da12758f update 2021-12-03 11:32:42 -05:00
Vladimir Mandic bd4d5935fe update 2021-12-03 11:28:27 -05:00
Vladimir Mandic 118fbaba4d release preview 2021-12-01 17:21:12 -05:00
Vladimir Mandic e70d9bb18b switch to custom tfjs and new typedefs 2021-12-01 15:37:52 -05:00
Vladimir Mandic f1a2ef34a5 rebuild 2021-12-01 07:51:57 -05:00
Vladimir Mandic e7fd0efd27 1.5.8 2021-11-30 13:17:15 -05:00
Vladimir Mandic eb5501c672 update tfjs 2021-10-28 13:58:21 -04:00
Vladimir Mandic 8b304fa3d4 1.5.7 2021-10-28 13:56:38 -04:00
Vladimir Mandic 1824a62efb update readme 2021-10-23 09:52:51 -04:00
Vladimir Mandic bd2317d42e update tfjs to 3.10.0 2021-10-22 09:06:43 -04:00
Vladimir Mandic 1def723c7b 1.5.6 2021-10-22 09:01:27 -04:00
Vladimir Mandic d78dd3aae1 update dependencies and stricter linting rules 2021-10-19 08:04:24 -04:00
Vladimir Mandic 461e074993 1.5.5 2021-10-19 07:54:26 -04:00
Vladimir Mandic 1d30a9f816 rebuild 2021-09-30 13:45:23 -04:00
Vladimir Mandic fcbfc8589a allow backend change in demo via url params 2021-09-30 13:43:15 -04:00
Vladimir Mandic c7b2c65c97 add node-match demo 2021-09-29 13:03:02 -04:00
Vladimir Mandic 1b4580dd6e fix face matcher 2021-09-29 09:32:30 -04:00
Vladimir Mandic fdddee7101 1.5.4 2021-09-29 09:31:42 -04:00
Vladimir Mandic aee959f464 update build platform and typedoc template 2021-09-18 18:38:13 -04:00
Vladimir Mandic f70e5615b4 update release 2021-09-16 08:31:45 -04:00
Vladimir Mandic 4ba43e08ae 1.5.3 2021-09-16 08:30:53 -04:00
Vladimir Mandic c3049e7c29 simplify tfjs imports 2021-09-16 08:30:50 -04:00
Vladimir Mandic e2609a0ef2 update sourcemaps 2021-09-11 11:14:57 -04:00
Vladimir Mandic d13586f549 reduce bundle size 2021-09-11 11:11:38 -04:00
Vladimir Mandic 519e346f02 enable webgl uniforms 2021-09-10 10:24:33 -04:00
Vladimir Mandic efb307d230 1.5.2 2021-09-10 10:22:09 -04:00
Vladimir Mandic 47f2b53e92 update dependencies 2021-09-08 13:57:03 -04:00
Vladimir Mandic 9b810d8028 redesign build platform 2021-09-08 13:51:28 -04:00
Vladimir Mandic f48cbda416 1.5.1 2021-09-08 13:50:47 -04:00
Vladimir Mandic ac172b8be5 update dependencies 2021-09-05 17:06:09 -04:00
Vladimir Mandic 2c8c8c2c1c update tfjs 3.9.0 2021-08-31 12:21:57 -04:00
Vladimir Mandic 9fb3029211 1.4.2 2021-08-31 12:21:05 -04:00
Vladimir Mandic 225192d18d update dependencies 2021-08-10 08:19:49 -04:00
Vladimir Mandic 8dab959446 update 2021-07-29 09:18:21 -04:00
Vladimir Mandic 42d9d677de update tfjs and typescript 2021-07-29 09:05:49 -04:00
Vladimir Mandic d5b366629b 1.4.1 2021-07-29 09:05:01 -04:00
Vladimir Mandic 1455c35c81 update typedoc 2021-06-18 07:19:03 -04:00
Vladimir Mandic 953ef705ab update with typedoc 4.3 2021-06-08 06:59:55 -04:00
Vladimir Mandic 00803107ce 1.3.1 2021-06-08 06:58:27 -04:00
Vladimir Mandic 2ac6baa02b update build and lint scripts 2021-06-04 09:17:04 -04:00
Vladimir Mandic 7ef748390c update for tfjs 3.7.0 2021-06-04 08:54:48 -04:00
Vladimir Mandic b4ba10898f update 2021-06-04 07:27:31 -04:00
Vladimir Mandic df47b3e2a9 update 2021-05-28 07:27:16 -04:00
Bettina Steger 76daa38bce
fix face expression detection (#56) 2021-05-28 07:26:21 -04:00
Vladimir Mandic e13a6d684b add bufferToVideo 2021-05-27 18:38:30 -04:00
Vladimir Mandic da426d5cfd fix git conflicts 2021-05-27 18:36:59 -04:00
Bettina Steger 1de3551a0b
fix TSC error (#55)
* add bufferToVideo and fetchVideo

* fixes for mov videos

* use oncanplay instead of timeout

* remove video.type
2021-05-27 18:33:47 -04:00
Vladimir Mandic 98ea06fb0e force typescript 4.2 due to typedoc incompatibility with ts 4.3 2021-05-27 16:04:17 -04:00
Vladimir Mandic bf84748777 1.2.5 2021-05-27 14:03:27 -04:00
Bettina Steger 25735fcb34
add bufferToVideo and fetchVideo (#54)
* add bufferToVideo and fetchVideo

* fixes for mov videos

* use oncanplay instead of timeout
2021-05-27 14:02:01 -04:00
Vladimir Mandic 7b8b30bfc9 update dependencies 2021-05-18 08:11:17 -04:00
Vladimir Mandic 107297015e 1.2.4 2021-05-18 08:10:36 -04:00
Vladimir Mandic b9c78b21b0 update tfjs version 2021-05-04 11:18:07 -04:00
Vladimir Mandic 1c577b6ede 1.2.3 2021-05-04 11:17:34 -04:00
675 changed files with 21285 additions and 229430 deletions

148
.build.json Normal file
View File

@ -0,0 +1,148 @@
{
"log": {
"enabled": false,
"debug": false,
"console": true,
"output": "build.log"
},
"profiles": {
"production": ["compile", "typings", "typedoc", "lint", "changelog"],
"development": ["serve", "watch", "compile"]
},
"clean": {
"locations": ["dist/*", "typedoc/*", "types/lib/src"]
},
"lint": {
"locations": [ "src/" ],
"rules": { }
},
"changelog": {
"log": "CHANGELOG.md"
},
"serve": {
"sslKey": "cert/https.key",
"sslCrt": "cert/https.crt",
"httpPort": 8000,
"httpsPort": 8001,
"documentRoot": ".",
"defaultFolder": "demo",
"defaultFile": "index.html"
},
"build": {
"global": {
"target": "es2018",
"treeShaking": true,
"ignoreAnnotations": true,
"sourcemap": false,
"banner": { "js": "/*\n Face-API\n homepage: <https://github.com/vladmandic/face-api>\n author: <https://github.com/vladmandic>\n*/\n" }
},
"targets": [
{
"name": "tfjs/browser/tf-version",
"platform": "browser",
"format": "esm",
"input": "src/tfjs/tf-version.ts",
"output": "dist/tfjs.version.js"
},
{
"name": "tfjs/node/cpu",
"platform": "node",
"format": "cjs",
"input": "src/tfjs/tf-node.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/node/cpu",
"platform": "node",
"format": "cjs",
"input": "src/index.ts",
"output": "dist/face-api.node.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/node/gpu",
"platform": "node",
"format": "cjs",
"input": "src/tfjs/tf-node-gpu.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/node/gpu",
"platform": "node",
"format": "cjs",
"input": "src/index.ts",
"output": "dist/face-api.node-gpu.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/node/wasm",
"platform": "node",
"format": "cjs",
"input": "src/tfjs/tf-node-wasm.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/node/wasm",
"platform": "node",
"format": "cjs",
"input": "src/index.ts",
"output": "dist/face-api.node-wasm.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/browser/esm/nobundle",
"platform": "browser",
"format": "esm",
"input": "src/tfjs/tf-browser.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/browser/esm/nobundle",
"platform": "browser",
"format": "esm",
"input": "src/index.ts",
"output": "dist/face-api.esm-nobundle.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/browser/esm/bundle",
"platform": "browser",
"format": "esm",
"input": "src/tfjs/tf-browser.ts",
"output": "dist/tfjs.esm.js"
},
{
"name": "faceapi/browser/iife/bundle",
"platform": "browser",
"format": "iife",
"globalName": "faceapi",
"minify": true,
"input": "src/index.ts",
"output": "dist/face-api.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/browser/esm/bundle",
"platform": "browser",
"format": "esm",
"sourcemap": true,
"input": "src/index.ts",
"output": "dist/face-api.esm.js",
"typings": "types/lib",
"typedoc": "typedoc",
"external": ["@tensorflow"]
}
]
},
"watch": {
"enabled": true,
"locations": [ "src/**" ]
},
"typescript": {
"allowJs": false
}
}

View File

@ -3,50 +3,74 @@
"env": { "env": {
"browser": true, "browser": true,
"commonjs": true, "commonjs": true,
"es6": true,
"node": true, "node": true,
"es2020": true "es2020": true
}, },
"parser": "@typescript-eslint/parser", "parser": "@typescript-eslint/parser",
"parserOptions": { "ecmaVersion": 2020 }, "parserOptions": { "ecmaVersion": "latest" },
"plugins": ["@typescript-eslint"], "plugins": [
"@typescript-eslint"
],
"extends": [ "extends": [
"eslint:recommended", "eslint:recommended",
"plugin:import/errors", "plugin:import/errors",
"plugin:import/warnings", "plugin:import/warnings",
"plugin:import/typescript",
"plugin:node/recommended", "plugin:node/recommended",
"plugin:promise/recommended", "plugin:promise/recommended",
"plugin:json/recommended-with-comments", "plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended",
"airbnb-base" "airbnb-base"
], ],
"ignorePatterns": [ "node_modules", "types" ], "ignorePatterns": [ "node_modules", "types" ],
"settings": {
"import/resolver": {
"node": {
"extensions": [".js", ".ts"]
}
}
},
"rules": { "rules": {
"max-len": [1, 275, 3], "@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/ban-types": "off",
"@typescript-eslint/ban-ts-comment": "off",
"@typescript-eslint/explicit-module-boundary-types": "off",
"@typescript-eslint/no-var-requires": "off",
"@typescript-eslint/no-empty-object-type": "off",
"@typescript-eslint/no-require-imports": "off",
"camelcase": "off", "camelcase": "off",
"class-methods-use-this": "off", "class-methods-use-this": "off",
"default-param-last": "off",
"dot-notation": "off",
"func-names": "off",
"guard-for-in": "off",
"import/extensions": "off", "import/extensions": "off",
"import/no-cycle": "off", "import/no-extraneous-dependencies": "off",
"import/no-named-as-default": "off",
"import/no-unresolved": "off",
"import/prefer-default-export": "off", "import/prefer-default-export": "off",
"lines-between-class-members": "off",
"max-len": [1, 275, 3],
"newline-per-chained-call": "off",
"no-async-promise-executor": "off",
"no-await-in-loop": "off", "no-await-in-loop": "off",
"no-bitwise": "off",
"no-case-declarations":"off",
"no-continue": "off", "no-continue": "off",
"no-loop-func": "off",
"no-mixed-operators": "off", "no-mixed-operators": "off",
"no-param-reassign":"off", "no-param-reassign":"off",
"no-plusplus": "off", "no-plusplus": "off",
"no-regex-spaces": "off",
"no-restricted-globals": "off",
"no-restricted-syntax": "off", "no-restricted-syntax": "off",
"no-return-assign": "off", "no-return-assign": "off",
"no-underscore-dangle": "off", "no-underscore-dangle": "off",
"node/no-missing-import": "off", "no-promise-executor-return": "off",
"node/no-missing-import": ["error", { "tryExtensions": [".js", ".json", ".ts"] }],
"node/no-unpublished-import": "off",
"node/no-unpublished-require": "off",
"node/no-unsupported-features/es-syntax": "off", "node/no-unsupported-features/es-syntax": "off",
"no-lonely-if": "off",
"node/shebang": "off",
"object-curly-newline": "off",
"prefer-destructuring": "off", "prefer-destructuring": "off",
"radix": "off", "prefer-template":"off",
"object-curly-newline": "off" "promise/always-return": "off",
"promise/catch-or-return": "off",
"promise/no-nesting": "off",
"radix": "off"
} }
} }

13
.github/FUNDING.yml vendored Normal file
View File

@ -0,0 +1,13 @@
# These are supported funding model platforms
github: [vladmandic]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

1
.gitignore vendored
View File

@ -1,3 +1,2 @@
node_modules node_modules
pnpm-lock.yaml pnpm-lock.yaml

View File

@ -1,3 +1,5 @@
node_modules node_modules
pnpm-lock.yaml pnpm-lock.yaml
typedoc
test
types/lib

4
.npmrc
View File

@ -1 +1,5 @@
force=true force=true
production=true
legacy-peer-deps=true
strict-peer-dependencies=false
node-options='--no-deprecation'

3
.vscode/settings.json vendored Normal file
View File

@ -0,0 +1,3 @@
{
"typescript.tsdk": "node_modules/typescript/lib"
}

View File

@ -1,19 +1,194 @@
# @vladmandic/face-api # @vladmandic/face-api
Version: **1.2.2** Version: **1.7.15**
Description: **FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS** Description: **FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS**
Author: **Vladimir Mandic <mandic00@live.com>** Author: **Vladimir Mandic <mandic00@live.com>**
License: **MIT** </LICENSE> License: **MIT**
Repository: **<git+https://github.com/vladmandic/face-api.git>** Repository: **<https://github.com/vladmandic/face-api>**
## Changelog ## Changelog
### **1.7.15** 2025/02/05 mandic00@live.com
### **origin/master** 2024/09/10 mandic00@live.com
### **1.7.14** 2024/09/10 mandic00@live.com
- rebuild
- merge pull request #188 from rebser/master
- fixing leaking eventhandlers when using htmlcanvaselement
- rebuild types
- rebuild
### **1.7.13** 2024/01/17 mandic00@live.com
- merge pull request #186 from khwalkowicz/master
- feat: enable noimplicitany
### **release: 1.7.12** 2023/06/12 mandic00@live.com
### **1.7.12** 2023/06/12 mandic00@live.com
### **1.7.11** 2023/05/08 mandic00@live.com
### **1.7.10** 2023/03/21 mandic00@live.com
- change typedefs
### **1.7.9** 2023/01/29 mandic00@live.com
### **1.7.8** 2023/01/06 mandic00@live.com
### **1.7.7** 2022/12/01 mandic00@live.com
### **1.7.6** 2022/10/18 mandic00@live.com
- fix face angles (yaw, pitch, & roll) accuracy (#130)
### **1.7.5** 2022/10/09 mandic00@live.com
- create funding.yml
- add node-wasm demo
### **1.7.4** 2022/09/25 mandic00@live.com
- improve face compare performance
### **1.7.3** 2022/08/24 mandic00@live.com
- refresh release
### **1.7.2** 2022/08/23 mandic00@live.com
- document and remove optional dependencies
### **release: 1.7.1** 2022/07/25 mandic00@live.com
### **1.7.1** 2022/07/25 mandic00@live.com
- refactor dependencies
- full rebuild
### **1.6.11** 2022/05/24 mandic00@live.com
### **1.6.10** 2022/05/24 mandic00@live.com
### **1.6.9** 2022/05/18 mandic00@live.com
### **1.6.8** 2022/05/09 mandic00@live.com
- exclude impossible detected face boxes
### **1.6.7** 2022/04/01 mandic00@live.com
- fixed typo error (#97)
### **1.6.6** 2022/03/04 mandic00@live.com
### **1.6.5** 2022/02/07 mandic00@live.com
### **1.6.4** 2022/01/14 mandic00@live.com
- add node with wasm build target
### **1.6.3** 2022/01/06 mandic00@live.com
### **1.6.2** 2022/01/01 mandic00@live.com
### **1.6.1** 2021/12/09 mandic00@live.com
- rebuild
- release preview
- switch to custom tfjs and new typedefs
- rebuild
### **1.5.8** 2021/11/30 mandic00@live.com
### **1.5.7** 2021/10/28 mandic00@live.com
### **1.5.6** 2021/10/22 mandic00@live.com
### **release: 1.5.5** 2021/10/19 mandic00@live.com
### **1.5.5** 2021/10/19 mandic00@live.com
- allow backend change in demo via url params
- add node-match demo
- fix face matcher
### **1.5.4** 2021/09/29 mandic00@live.com
### **1.5.3** 2021/09/16 mandic00@live.com
- simplify tfjs imports
- reduce bundle size
- enable webgl uniforms
### **1.5.2** 2021/09/10 mandic00@live.com
- redesign build platform
### **1.5.1** 2021/09/08 mandic00@live.com
### **1.4.2** 2021/08/31 mandic00@live.com
### **release: 1.4.1** 2021/07/29 mandic00@live.com
### **1.4.1** 2021/07/29 mandic00@live.com
### **release: 1.3.1** 2021/06/18 mandic00@live.com
### **1.3.1** 2021/06/08 mandic00@live.com
- fix face expression detection (#56)
- add buffertovideo
- fix git conflicts
- fix tsc error (#55)
- force typescript 4.2 due to typedoc incompatibility with ts 4.3
### **1.2.5** 2021/05/27 mandic00@live.com
- add buffertovideo and fetchvideo (#54)
### **1.2.4** 2021/05/18 mandic00@live.com
### **1.2.3** 2021/05/04 mandic00@live.com
### **update for tfjs 3.6.0** 2021/04/30 mandic00@live.com
### **1.2.2** 2021/04/30 mandic00@live.com ### **1.2.2** 2021/04/30 mandic00@live.com
- add node-wasm demo
### **origin/master** 2021/04/26 mandic00@live.com
- accept uri as input to demo node and node-canvas - accept uri as input to demo node and node-canvas
- major version full rebuild - major version full rebuild
@ -82,111 +257,61 @@ Repository: **<git+https://github.com/vladmandic/face-api.git>**
- add badges - add badges
- optimize for npm - optimize for npm
- 0.30.6
### **0.30.6** 2021/03/08 mandic00@live.com
- added typings for face angle - added typings for face angle
- disable landmark printing - disable landmark printing
- 0.30.5
### **0.30.5** 2021/03/07 mandic00@live.com
- enabled live demo on gitpages - enabled live demo on gitpages
- 0.30.4
### **0.30.4** 2021/03/07 mandic00@live.com
- added face angle calculations - added face angle calculations
- added documentation - added documentation
- package update - package update
- 0.30.3
### **0.30.3** 2021/03/04 mandic00@live.com - 0.30.2
- 0.30.1
- 0.13.3
### **0.30.2** 2021/02/26 mandic00@live.com
### **0.30.1** 2021/02/25 mandic00@live.com
### **0.13.3** 2021/02/21 mandic00@live.com
- added note-cpu target - added note-cpu target
- merge pull request #39 from xemle/feature/node-cpu - merge pull request #39 from xemle/feature/node-cpu
- add node-cpu build for non supported systems of libtensorflow - add node-cpu build for non supported systems of libtensorflow
- 0.13.2
### **0.13.2** 2021/02/20 mandic00@live.com - 0.13.1
- 0.12.10
- exception handling
### **0.13.1** 2021/02/20 mandic00@live.com - 0.12.9
- exception handling
- 0.12.8
### **0.12.10** 2021/02/20 mandic00@live.com
- exception handling - exception handling
### **0.12.9** 2021/02/20 mandic00@live.com
### **0.12.8** 2021/02/20 mandic00@live.com
### **0.12.7** 2021/02/17 mandic00@live.com ### **0.12.7** 2021/02/17 mandic00@live.com
- 0.12.7 - 0.12.7
- 0.12.6
### **0.12.6** 2021/02/13 mandic00@live.com - 0.12.5
- 0.12.4
- 0.12.3
### **0.12.5** 2021/02/12 mandic00@live.com - 0.12.2
### **0.12.4** 2021/02/06 mandic00@live.com
### **0.12.3** 2021/02/06 mandic00@live.com
### **0.12.2** 2021/02/02 mandic00@live.com
### **update for tfjs 3.0.0** 2021/01/29 mandic00@live.com ### **update for tfjs 3.0.0** 2021/01/29 mandic00@live.com
- 0.12.1
### **0.12.1** 2021/01/29 mandic00@live.com
- rebuild - rebuild
- 0.11.6
### **0.11.6** 2021/01/24 mandic00@live.com
- add check for null face descriptor - add check for null face descriptor
- merge pull request #34 from patrickhulce/patch-1 - merge pull request #34 from patrickhulce/patch-1
- fix: return empty descriptor for zero-sized faces - fix: return empty descriptor for zero-sized faces
- 0.11.5
### **0.11.5** 2021/01/22 mandic00@live.com - 0.11.4
- 0.11.3
### **0.11.4** 2021/01/22 mandic00@live.com
### **0.11.3** 2021/01/20 mandic00@live.com
- fix typo - fix typo
- enable full minification - enable full minification
- 0.11.2
### **0.11.2** 2021/01/12 mandic00@live.com
- full rebuild - full rebuild
- 0.11.1
### **0.11.1** 2021/01/10 mandic00@live.com
- added live webcam demo - added live webcam demo
- 0.10.2
### **0.10.2** 2021/01/03 mandic00@live.com
- ts linting - ts linting
- version bump - version bump
- 0.10.1
### **0.10.1** 2020/12/23 mandic00@live.com
- full re-lint and typings generation - full re-lint and typings generation
- rebuild - rebuild

171
README.md
View File

@ -53,18 +53,18 @@ Example can be accessed directly using Git pages using URL:
### NodeJS ### NodeJS
Three NodeJS examples are: NodeJS examples are:
- `/demo/node-simple.js`:
Simplest possible NodeJS demo for FaceAPI in under 30 lines of JavaScript code
- `/demo/node.js`: - `/demo/node.js`:
Regular usage of `FaceAPI` from `NodeJS`
Using `TFJS` native methods to load images without external dependencies Using `TFJS` native methods to load images without external dependencies
- `/demo/node-canvas.js`: - `/demo/node-canvas.js` and `/demo/node-image.js`:
Regular usage of `FaceAPI` from `NodeJS`
Using external `canvas` module to load images Using external `canvas` module to load images
Which also allows for image drawing and saving inside `NodeJS` environment Which also allows for image drawing and saving inside `NodeJS` environment
- `/demo/node-wasm.js`: - `/demo/node-match.js`:
Same as `node-canvas`, but using `WASM` backend in `NodeJS` environment Simple demo that compares face similarity from a given image
Because why not :) to a second image or list of images in a folder
- `/demo/node-multiprocess.js`: - `/demo/node-multiprocess.js`:
Multiprocessing showcase that uses pool of worker processes Multiprocessing showcase that uses pool of worker processes
(`node-multiprocess-worker.js`) (`node-multiprocess-worker.js`)
@ -104,8 +104,11 @@ Three NodeJS examples are:
2021-03-14 08:42:09 STATE: Main: worker exit: 1888019 0 2021-03-14 08:42:09 STATE: Main: worker exit: 1888019 0
``` ```
Note that `@tensorflow/tfjs-node` or `@tensorflow/tfjs-node-gpu` ### NodeJS Notes
must be installed before using NodeJS example - Supported NodeJS versions are **14** up to **22**
NodeJS version **23** and higher are not supported due to incompatibility with TensorFlow/JS
- `@tensorflow/tfjs-node` or `@tensorflow/tfjs-node-gpu`
must be installed before using any **NodeJS** examples
<br><hr><br> <br><hr><br>
@ -133,8 +136,6 @@ Simply include latest version of `FaceAPI` directly from a CDN in your HTML:
*without* TFJS pre-bundled *without* TFJS pre-bundled
- `dist/face-api.node-gpu.js`: CommonJS format for server-side NodeJS execution - `dist/face-api.node-gpu.js`: CommonJS format for server-side NodeJS execution
*without* TFJS pre-bundled and optimized for CUDA GPU acceleration *without* TFJS pre-bundled and optimized for CUDA GPU acceleration
- `dist/face-api.node-cpu.js`: CommonJS format for server-side NodeJS execution
*without* TFJS pre-bundled and using JS engine for platforms where tensorflow binary library version is not available
Defaults are: Defaults are:
@ -151,7 +152,7 @@ Bundled `TFJS` can be used directly via export: `faceapi.tf`
Reason for additional `nobundle` version is if you want to Reason for additional `nobundle` version is if you want to
include a specific version of TFJS and not rely on pre-packaged one include a specific version of TFJS and not rely on pre-packaged one
`FaceAPI` is compatible with TFJS 2.0+ `FaceAPI` is compatible with TFJS 2.0+ and TFJS 3.0+
All versions include `sourcemap` All versions include `sourcemap`
@ -260,7 +261,7 @@ If you want to GPU Accelerated execution in NodeJS, you must have CUDA libraries
Then install appropriate version of `FaceAPI`: Then install appropriate version of `FaceAPI`:
```shell ```shell
npm install @tensorflow/tfjs-node npm install @tensorflow/tfjs-node-gpu
npm install @vladmandic/face-api npm install @vladmandic/face-api
``` ```
@ -271,18 +272,24 @@ And then use with:
const faceapi = require('@vladmandic/face-api/dist/face-api.node-gpu.js'); // this loads face-api version with correct bindings for tfjs-node-gpu const faceapi = require('@vladmandic/face-api/dist/face-api.node-gpu.js'); // this loads face-api version with correct bindings for tfjs-node-gpu
``` ```
If you want to use `FaceAPI` in a NodeJS on platforms where NodeJS binary libraries are not supported, you can use JavaScript CPU backend. If you want to use `FaceAPI` in a NodeJS on platforms where **tensorflow** binary libraries are not supported, you can use NodeJS **WASM** backend.
```shell ```shell
npm install @tensorflow/tfjs npm install @tensorflow/tfjs
npm install @tensorflow/tfjs-backend-wasm
npm install @vladmandic/face-api npm install @vladmandic/face-api
``` ```
And then use with: And then use with:
```js ```js
const tf = require('@tensorflow/tfjs') const tf = require('@tensorflow/tfjs');
const faceapi = require('@vladmandic/face-api/dist/face-api.node-cpu.js'); const wasm = require('@tensorflow/tfjs-backend-wasm');
const faceapi = require('@vladmandic/face-api/dist/face-api.node-wasm.js'); // use this when using face-api in dev mode
wasm.setWasmPaths('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/');
await tf.setBackend('wasm');
await tf.ready();
...
``` ```
If you want to use graphical functions inside NodeJS, If you want to use graphical functions inside NodeJS,
@ -310,12 +317,14 @@ faceapi.env.monkeyPatch({ Canvas, Image, ImageData })
## Weights ## Weights
Pretrained models and their weights are includes in `./model`. Pretrained models and their weights are included in `./model`.
<br><hr><br> <br><hr><br>
## Test & Dev Web Server ## Test & Dev Web Server
To install development dependencies, use `npm install --production=false`
Built-in test&dev web server can be started using Built-in test&dev web server can be started using
```shell ```shell
@ -327,16 +336,47 @@ By default it starts HTTP server on port 8000 and HTTPS server on port 8001 and
- <https://localhost:8001/demo/index.html> - <https://localhost:8001/demo/index.html>
- <https://localhost:8001/demo/webcam.html> - <https://localhost:8001/demo/webcam.html>
```json ```js
2021-03-14 08:41:09 INFO: @vladmandic/face-api version 1.0.2 2022-01-14 09:56:19 INFO: @vladmandic/face-api version 1.6.4
2021-03-14 08:41:09 INFO: User: vlado Platform: linux Arch: x64 Node: v15.7.0 2022-01-14 09:56:19 INFO: User: vlado Platform: linux Arch: x64 Node: v17.2.0
2021-03-14 08:41:09 INFO: Build: file startup all target: es2018 2022-01-14 09:56:19 INFO: Application: { name: '@vladmandic/face-api', version: '1.6.4' }
2021-03-14 08:41:09 STATE: HTTP server listening: 8000 2022-01-14 09:56:19 INFO: Environment: { profile: 'development', config: '.build.json', package: 'package.json', tsconfig: true, eslintrc: true, git: true }
2021-03-14 08:41:09 STATE: HTTP2 server listening: 8001 2022-01-14 09:56:19 INFO: Toolchain: { build: '0.6.7', esbuild: '0.14.11', typescript: '4.5.4', typedoc: '0.22.10', eslint: '8.6.0' }
2021-03-14 08:41:09 STATE: Monitoring: [ 'package.json', 'demo', 'src', [length]: 3 ] 2022-01-14 09:56:19 INFO: Build: { profile: 'development', steps: [ 'serve', 'watch', 'compile' ] }
2021-03-14 08:41:10 STATE: Build for: browserBundle type: tfjs: { modules: 1258, moduleBytes: 4040087, imports: 7, importBytes: 276, outputBytes: 1072314, outputFiles: 'dist/tfjs.esm.js' } 2022-01-14 09:56:19 STATE: WebServer: { ssl: false, port: 8000, root: '.' }
2021-03-14 08:41:10 STATE: Build for: browserBundle type: iife: { imports: 160, importBytes: 1305679, outputBytes: 1151683, outputFiles: 'dist/face-api.js' } 2022-01-14 09:56:19 STATE: WebServer: { ssl: true, port: 8001, root: '.', sslKey: 'build/cert/https.key', sslCrt: 'build/cert/https.crt' }
2021-03-14 08:41:10 STATE: Build for: browserBundle type: esm: { imports: 160, importBytes: 1305679, outputBytes: 1151520, outputFiles: 'dist/face-api.esm.js' } 2022-01-14 09:56:19 STATE: Watch: { locations: [ 'src/**', 'README.md', 'src/**', 'src/**' ] }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/cpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 143, outputBytes: 1276 }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/cpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node.js', files: 162, inputBytes: 234787, outputBytes: 175203 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/gpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-gpu.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 147, outputBytes: 1296 }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/gpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-gpu.js', files: 162, inputBytes: 234807, outputBytes: 175219 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/wasm', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-wasm.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 185, outputBytes: 1367 }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/wasm', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-wasm.js', files: 162, inputBytes: 234878, outputBytes: 175294 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/tf-version', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-version.ts', output: 'dist/tfjs.version.js', files: 1, inputBytes: 1063, outputBytes: 1662 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 2, inputBytes: 2172, outputBytes: 811 }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm-nobundle.js', files: 162, inputBytes: 234322, outputBytes: 169437 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 11, inputBytes: 2172, outputBytes: 2444105 }
2022-01-14 09:56:20 STATE: Compile: { name: 'faceapi/browser/iife/bundle', format: 'iife', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.js', files: 162, inputBytes: 2677616, outputBytes: 1252572 }
2022-01-14 09:56:20 STATE: Compile: { name: 'faceapi/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm.js', files: 162, inputBytes: 2677616, outputBytes: 2435063 }
2022-01-14 09:56:20 INFO: Listening...
...
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/html', size: 1047, url: '/', remote: '::1' }
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/javascript', size: 6919, url: '/index.js', remote: '::1' }
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/javascript', size: 2435063, url: '/dist/face-api.esm.js', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 4125244, url: '/dist/face-api.esm.js.map', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 3219, url: '/model/tiny_face_detector_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 193321, url: '/model/tiny_face_detector_model.bin', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 28233, url: '/model/ssd_mobilenetv1_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 5616957, url: '/model/ssd_mobilenetv1_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 8392, url: '/model/age_gender_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 429708, url: '/model/age_gender_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 8485, url: '/model/face_landmark_68_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 356840, url: '/model/face_landmark_68_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 19615, url: '/model/face_recognition_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 6444032, url: '/model/face_recognition_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 6980, url: '/model/face_expression_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 329468, url: '/model/face_expression_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'image/jpeg', size: 144516, url: '/sample1.jpg', remote: '::1' }
``` ```
<br><hr><br> <br><hr><br>
@ -360,35 +400,41 @@ cd face-api
Then install all dependencies and run rebuild: Then install all dependencies and run rebuild:
```shell ```shell
npm install npm install --production=false
npm run build npm run build
``` ```
Build process uses script `build.js` that creates optimized build for each target: Build process uses `@vladmandic/build` module that creates optimized build for each target:
```text ```js
> @vladmandic/face-api@1.0.2 build > @vladmandic/face-api@1.7.1 build /home/vlado/dev/face-api
> rimraf dist/* types/* typedoc/* && node server/build.js > node build.js
```
```json 2022-07-25 08:21:05 INFO: Application: { name: '@vladmandic/face-api', version: '1.7.1' }
2021-03-14 08:39:21 INFO: @vladmandic/face-api version 1.0.2 2022-07-25 08:21:05 INFO: Environment: { profile: 'production', config: '.build.json', package: 'package.json', tsconfig: true, eslintrc: true, git: true }
2021-03-14 08:39:21 INFO: User: vlado Platform: linux Arch: x64 Node: v15.7.0 2022-07-25 08:21:05 INFO: Toolchain: { build: '0.7.7', esbuild: '0.14.50', typescript: '4.7.4', typedoc: '0.23.9', eslint: '8.20.0' }
2021-03-14 08:39:21 INFO: Build: file startup all target: es2018 2022-07-25 08:21:05 INFO: Build: { profile: 'production', steps: [ 'clean', 'compile', 'typings', 'typedoc', 'lint', 'changelog' ] }
2021-03-14 08:39:21 STATE: Build for: node type: tfjs: { imports: 1, importBytes: 143, outputBytes: 731, outputFiles: 'dist/tfjs.esm.js' } 2022-07-25 08:21:05 STATE: Clean: { locations: [ 'dist/*', 'typedoc/*', 'types/lib/src' ] }
2021-03-14 08:39:21 STATE: Build for: node type: node: { imports: 160, importBytes: 234096, outputBytes: 85371, outputFiles: 'dist/face-api.node.js' } 2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/cpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 143, outputBytes: 614 }
2021-03-14 08:39:21 STATE: Build for: nodeGPU type: tfjs: { imports: 1, importBytes: 147, outputBytes: 735, outputFiles: 'dist/tfjs.esm.js' } 2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/cpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node.js', files: 162, inputBytes: 234137, outputBytes: 85701 }
2021-03-14 08:39:21 STATE: Build for: nodeGPU type: node: { imports: 160, importBytes: 234100, outputBytes: 85379, outputFiles: 'dist/face-api.node-gpu.js' } 2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/gpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-gpu.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 147, outputBytes: 618 }
2021-03-14 08:39:21 STATE: Build for: nodeCPU type: tfjs: { imports: 1, importBytes: 138, outputBytes: 726, outputFiles: 'dist/tfjs.esm.js' } 2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/gpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-gpu.js', files: 162, inputBytes: 234141, outputBytes: 85705 }
2021-03-14 08:39:21 STATE: Build for: nodeCPU type: node: { imports: 160, importBytes: 234091, outputBytes: 85370, outputFiles: 'dist/face-api.node-cpu.js' } 2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/wasm', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-wasm.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 185, outputBytes: 670 }
2021-03-14 08:39:21 STATE: Build for: browserNoBundle type: tfjs: { imports: 1, importBytes: 276, outputBytes: 244, outputFiles: 'dist/tfjs.esm.js' } 2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/wasm', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-wasm.js', files: 162, inputBytes: 234193, outputBytes: 85755 }
2021-03-14 08:39:21 STATE: Build for: browserNoBundle type: esm: { imports: 160, importBytes: 233609, outputBytes: 82634, outputFiles: 'dist/face-api.esm-nobundle.js' } 2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/tf-version', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-version.ts', output: 'dist/tfjs.version.js', files: 1, inputBytes: 1063, outputBytes: 400 }
2021-03-14 08:39:22 STATE: Build for: browserBundle type: tfjs: { modules: 1258, moduleBytes: 4040087, imports: 7, importBytes: 276, outputBytes: 1072314, outputFiles: 'dist/tfjs.esm.js' } 2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 2, inputBytes: 910, outputBytes: 527 }
2021-03-14 08:39:22 STATE: Build for: browserBundle type: iife: { imports: 160, importBytes: 1305679, outputBytes: 1151683, outputFiles: 'dist/face-api.js' } 2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm-nobundle.js', files: 162, inputBytes: 234050, outputBytes: 82787 }
2021-03-14 08:39:22 STATE: Build for: browserBundle type: esm: { imports: 160, importBytes: 1305679, outputBytes: 1151520, outputFiles: 'dist/face-api.esm.js' } 2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 11, inputBytes: 910, outputBytes: 1184871 }
2021-03-14 08:39:22 INFO: Compile typings: [ 'src/index.ts', [length]: 1 ] 2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/iife/bundle', format: 'iife', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.js', files: 162, inputBytes: 1418394, outputBytes: 1264631 }
2021-03-14 08:39:27 INFO: Update Change log: [ '/home/vlado/dev/face-api/CHANGELOG.md', [length]: 1 ] 2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm.js', files: 162, inputBytes: 1418394, outputBytes: 1264150 }
2021-03-14 08:39:27 INFO: Generate TypeDocs: [ 'src/index.ts', [length]: 1 ] 2022-07-25 08:21:07 STATE: Typings: { input: 'src/index.ts', output: 'types/lib', files: 93 }
2022-07-25 08:21:09 STATE: TypeDoc: { input: 'src/index.ts', output: 'typedoc', objects: 154, generated: true }
2022-07-25 08:21:13 STATE: Lint: { locations: [ 'src/' ], files: 174, errors: 0, warnings: 0 }
2022-07-25 08:21:14 STATE: ChangeLog: { repository: 'https://github.com/vladmandic/face-api', branch: 'master', output: 'CHANGELOG.md' }
2022-07-25 08:21:14 INFO: Done...
2022-07-25 08:21:14 STATE: Copy: { input: 'types/lib/dist/tfjs.esm.d.ts' }
2022-07-25 08:21:15 STATE: API-Extractor: { succeeeded: true, errors: 0, warnings: 417 }
2022-07-25 08:21:15 INFO: FaceAPI Build complete...
``` ```
<br><hr><br> <br><hr><br>
@ -403,18 +449,14 @@ Build process uses script `build.js` that creates optimized build for each targe
## Note ## Note
This is updated **face-api.js** with latest available TensorFlow/JS as the original is not compatible with **tfjs 2.0+**. This is updated **face-api.js** with latest available TensorFlow/JS as the original is not compatible with **tfjs >=2.0**.
Forked from [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2** which was released on March 22nd, 2020 Forked from [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2** which was released on March 22nd, 2020
Currently based on **`TensorFlow/JS` 3.6.0** *Why?* I needed a FaceAPI that does not cause version conflict with newer versions of TensorFlow
And since the original FaceAPI was open-source, I've released this version as well
*Why?* I needed FaceAPI that does not cause version conflict with newer versions of TensorFlow Changes ended up being too large for a simple pull request and it ended up being a full-fledged version on its own
And since original FaceAPI was open-source, I've released this version as well Plus many features were added since the original inception
Changes ended up being too large for a simple pull request
and it ended up being a full-fledged version on its own
Plus many features were added since original inception
Although a lot of work has gone into this version of `FaceAPI` and it will continue to be maintained, Although a lot of work has gone into this version of `FaceAPI` and it will continue to be maintained,
at this time it is completely superseded by my newer library `Human` which covers the same use cases, at this time it is completely superseded by my newer library `Human` which covers the same use cases,
@ -429,14 +471,18 @@ but extends it with newer AI models, additional detection details, compatibility
Compared to [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2**: Compared to [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2**:
- Compatible with `TensorFlow/JS 2.0+ & 3.0+` - Compatible with `TensorFlow/JS 2.0+, 3.0+ and 4.0+`
Currently using **`TensorFlow/JS` 4.16**
Original `face-api.js` is based on `TFJS` **1.7.4**
- Compatible with `WebGL`, `CPU` and `WASM` TFJS Browser backends - Compatible with `WebGL`, `CPU` and `WASM` TFJS Browser backends
- Compatible with both `tfjs-node` and `tfjs-node-gpu` TFJS NodeJS backends - Compatible with both `tfjs-node` and `tfjs-node-gpu` TFJS NodeJS backends
- Updated all type castings for TypeScript type checking to `TypeScript 4.2` - Updated all type castings for TypeScript type checking to `TypeScript 5.3`
- Switched bundling from `UMD` to `ESM` + `CommonJS` with fallback to `IIFE` - Switched bundling from `UMD` to `ESM` + `CommonJS` with fallback to `IIFE`
Resulting code is optimized per-platform instead of being universal Resulting code is optimized per-platform instead of being universal
Fully tree shakable when imported as an `ESM` module Fully tree shakable when imported as an `ESM` module
Browser bundle process uses `ESBuild` instead of `Rollup` Browser bundle process uses `ESBuild` instead of `Rollup`
- Added separate `face-api` versions with `tfjs` pre-bundled and without `tfjs`
When using `-nobundle` version, user can load any version of `tfjs` manually
- Typescript build process now targets `ES2018` and instead of dual `ES5`/`ES6` - Typescript build process now targets `ES2018` and instead of dual `ES5`/`ES6`
Resulting code is clean ES2018 JavaScript without polyfills Resulting code is clean ES2018 JavaScript without polyfills
- Removed old tests, docs, examples - Removed old tests, docs, examples
@ -452,6 +498,7 @@ Compared to [face-api.js](https://github.com/justadudewhohacks/face-api.js) vers
- Added `face angle` calculations that returns `roll`, `yaw` and `pitch` - Added `face angle` calculations that returns `roll`, `yaw` and `pitch`
- Added `typedoc` automatic API specification generation during build - Added `typedoc` automatic API specification generation during build
- Added `changelog` automatic generation during build - Added `changelog` automatic generation during build
- New process to generate **TypeDocs** bundle using API-Extractor
<br> <br>

38
api-extractor.json Normal file
View File

@ -0,0 +1,38 @@
{
"$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json",
"mainEntryPointFilePath": "types/lib/src/index.d.ts",
"bundledPackages": ["@tensorflow/tfjs-core", "@tensorflow/tfjs-converter", "@types/offscreencanvas"],
"compiler": {
"skipLibCheck": false
},
"newlineKind": "lf",
"dtsRollup": {
"enabled": true,
"untrimmedFilePath": "types/face-api.d.ts"
},
"docModel": { "enabled": false },
"tsdocMetadata": {
"enabled": false
},
"apiReport": { "enabled": false },
"messages": {
"compilerMessageReporting": {
"default": {
"logLevel": "warning"
}
},
"extractorMessageReporting": {
"default": {
"logLevel": "warning"
},
"ae-missing-release-tag": {
"logLevel": "none"
}
},
"tsdocMessageReporting": {
"default": {
"logLevel": "warning"
}
}
}
}

77
build.js Normal file
View File

@ -0,0 +1,77 @@
// Custom production build script: runs @vladmandic/build, then rolls up
// generated typings with API-Extractor and patches the emitted .d.ts files.
const fs = require('fs');
const log = require('@vladmandic/pilogger');
const Build = require('@vladmandic/build').Build;
const APIExtractor = require('@microsoft/api-extractor');
// search/replace pairs applied to the rolled-up typedef file:
// rewrites external triple-slash type references to local vendored declaration files
const regEx = [
  { search: 'types="@webgpu/types/dist"', replace: 'path="../src/types/webgpu.d.ts"' },
  { search: 'types="offscreencanvas"', replace: 'path="../src/types/offscreencanvas.d.ts"' },
];
// copy one file from src to dst; when src does not exist, log a warning and skip
function copyFile(src, dst) {
  const info = { input: src, output: dst };
  if (!fs.existsSync(src)) {
    log.warn('Copy:', info);
    return;
  }
  log.state('Copy:', info);
  fs.writeFileSync(dst, fs.readFileSync(src));
}
// persist string content to the destination path, logging the write
const writeFile = (str, dst) => {
  log.state('Write:', { output: dst });
  fs.writeFileSync(dst, str);
};
// apply a list of { search, replace } pairs to a file in place, line by line
// (first occurrence of each search string per line, matching String.replace)
// fixed: original re-read and re-wrote the entire file once per entry;
// now the file is read once, all entries are applied, and it is written once
function regExFile(src, entries) {
  if (!fs.existsSync(src)) {
    log.warn('Filter:', { src });
    return;
  }
  log.state('Filter:', { input: src });
  const lines = fs.readFileSync(src, 'utf-8').split(/\r?\n/);
  const out = lines.map((line) => {
    let patched = line;
    for (const entry of entries) {
      if (patched.includes(entry.search)) patched = patched.replace(entry.search, entry.replace);
    }
    return patched;
  });
  fs.writeFileSync(src, out.join('\n'));
}
// api-extractor message ids that are known-noisy and should not be reported
const apiIgnoreList = ['ae-forgotten-export', 'ae-unresolved-link', 'tsdoc-param-tag-missing-hyphen'];
/**
 * Production build entry point:
 * 1. run the @vladmandic/build production profile
 * 2. copy the generated tfjs typedef next to the dist bundles
 * 3. roll up all typings into types/face-api.d.ts via API-Extractor
 * 4. patch type references and emit one re-export .d.ts stub per dist bundle
 */
async function main() {
  // run production build
  const build = new Build();
  await build.run('production');
  // patch tfjs typedefs (copyFile logs the operation itself; removed duplicate log.state call)
  copyFile('types/lib/dist/tfjs.esm.d.ts', 'dist/tfjs.esm.d.ts');
  // run api-extractor to create typedef rollup
  const extractorConfig = APIExtractor.ExtractorConfig.loadFileAndPrepare('api-extractor.json');
  const extractorResult = APIExtractor.Extractor.invoke(extractorConfig, {
    localBuild: true,
    showVerboseMessages: false,
    messageCallback: (msg) => {
      msg.handled = true;
      // skip low-severity messages, anything originating in node_modules, and known-noisy ids
      if (msg.logLevel === 'none' || msg.logLevel === 'verbose' || msg.logLevel === 'info') return;
      if (msg.sourceFilePath?.includes('/node_modules/')) return;
      if (apiIgnoreList.some((ignore) => msg.messageId.includes(ignore))) return;
      log.data('API', { level: msg.logLevel, category: msg.category, id: msg.messageId, file: msg.sourceFilePath, line: msg.sourceFileLine, text: msg.text });
    },
  });
  // fixed typo in log key: 'succeeeded' -> 'succeeded'
  log.state('API-Extractor:', { succeeded: extractorResult.succeeded, errors: extractorResult.errorCount, warnings: extractorResult.warningCount });
  regExFile('types/face-api.d.ts', regEx);
  // every dist bundle gets an identical .d.ts stub re-exporting the rolled-up typings
  const bundles = ['face-api.esm-nobundle', 'face-api.esm', 'face-api', 'face-api.node', 'face-api.node-gpu', 'face-api.node-wasm'];
  for (const bundle of bundles) writeFile('export * from \'../types/face-api\';', `dist/${bundle}.d.ts`);
  log.info('FaceAPI Build complete...');
}
main();

View File

@ -11,7 +11,7 @@
<link rel="shortcut icon" href="../favicon.ico" type="image/x-icon"> <link rel="shortcut icon" href="../favicon.ico" type="image/x-icon">
<script src="./index.js" type="module"></script> <script src="./index.js" type="module"></script>
</head> </head>
<body style="font-family: monospace; background: black; color: white; font-size: 16px; line-height: 22px; margin: 0;"> <body style="font-family: monospace; background: black; color: white; font-size: 16px; line-height: 22px; margin: 0; overflow-x: hidden;">
<div id="log"></div> <div id="log"></div>
</body> </body>
</html> </html>

View File

@ -1,25 +1,25 @@
import * as faceapi from '../dist/face-api.esm.js'; /**
* FaceAPI Demo for Browsers
* Loaded via `index.html`
*/
import * as faceapi from '../dist/face-api.esm.js'; // use when in dev mode
// import * as faceapi from '@vladmandic/face-api'; // use when downloading face-api as npm
// configuration options // configuration options
const modelPath = '../model/'; // path to model folder that will be loaded using http const modelPath = '../model/'; // path to model folder that will be loaded using http
// const modelPath = 'https://vladmandic.github.io/face-api/model/'; // path to model folder that will be loaded using http // const modelPath = 'https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model/'; // path to model folder that will be loaded using http
const imgSize = 800; // maximum image size in pixels const imgSize = 800; // maximum image size in pixels
const minScore = 0.3; // minimum score const minScore = 0.3; // minimum score
const maxResults = 10; // maximum number of results to return const maxResults = 10; // maximum number of results to return
const samples = ['sample1.jpg', 'sample2.jpg', 'sample3.jpg', 'sample4.jpg', 'sample5.jpg', 'sample6.jpg']; // sample images to be loaded using http const samples = ['sample1.jpg', 'sample2.jpg', 'sample3.jpg', 'sample4.jpg', 'sample5.jpg', 'sample6.jpg']; // sample images to be loaded using http
// helper function to pretty-print json object to string // helper function to pretty-print json object to string
function str(json) { const str = (json) => (json ? JSON.stringify(json).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ') : '');
let text = '<font color="lightblue">';
text += json ? JSON.stringify(json).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ') : '';
text += '</font>';
return text;
}
// helper function to print strings to html document as a log // helper function to print strings to html document as a log
function log(...txt) { function log(...txt) {
// eslint-disable-next-line no-console console.log(...txt); // eslint-disable-line no-console
console.log(...txt);
const div = document.getElementById('log'); const div = document.getElementById('log');
if (div) div.innerHTML += `<br>${txt}`; if (div) div.innerHTML += `<br>${txt}`;
} }
@ -33,11 +33,9 @@ function faces(name, title, id, data) {
canvas.style.position = 'absolute'; canvas.style.position = 'absolute';
canvas.style.left = `${img.offsetLeft}px`; canvas.style.left = `${img.offsetLeft}px`;
canvas.style.top = `${img.offsetTop}px`; canvas.style.top = `${img.offsetTop}px`;
// @ts-ignore
canvas.width = img.width; canvas.width = img.width;
// @ts-ignore
canvas.height = img.height; canvas.height = img.height;
const ctx = canvas.getContext('2d'); const ctx = canvas.getContext('2d', { willReadFrequently: true });
if (!ctx) return; if (!ctx) return;
// draw title // draw title
ctx.font = '1rem sans-serif'; ctx.font = '1rem sans-serif';
@ -53,6 +51,7 @@ function faces(name, title, id, data) {
ctx.beginPath(); ctx.beginPath();
ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height); ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height);
ctx.stroke(); ctx.stroke();
// draw text labels
ctx.globalAlpha = 1; ctx.globalAlpha = 1;
ctx.fillText(`${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 18); ctx.fillText(`${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 18);
ctx.fillText(`${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 2); ctx.fillText(`${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 2);
@ -72,8 +71,7 @@ function faces(name, title, id, data) {
// helper function to draw processed image and its results // helper function to draw processed image and its results
function print(title, img, data) { function print(title, img, data) {
// eslint-disable-next-line no-console console.log('Results:', title, img, data); // eslint-disable-line no-console
console.log('Results:', title, img, data);
const el = new Image(); const el = new Image();
el.id = Math.floor(Math.random() * 100000).toString(); el.id = Math.floor(Math.random() * 100000).toString();
el.src = img; el.src = img;
@ -96,7 +94,7 @@ async function image(url) {
const canvas = document.createElement('canvas'); const canvas = document.createElement('canvas');
canvas.height = img.height; canvas.height = img.height;
canvas.width = img.width; canvas.width = img.width;
const ctx = canvas.getContext('2d'); const ctx = canvas.getContext('2d', { willReadFrequently: true });
if (ctx) ctx.drawImage(img, 0, 0, img.width, img.height); if (ctx) ctx.drawImage(img, 0, 0, img.width, img.height);
// return generated canvas to be used by tfjs during detection // return generated canvas to be used by tfjs during detection
resolve(canvas); resolve(canvas);
@ -111,18 +109,23 @@ async function main() {
log('FaceAPI Test'); log('FaceAPI Test');
// if you want to use wasm backend location for wasm binaries must be specified // if you want to use wasm backend location for wasm binaries must be specified
// await faceapi.tf.setWasmPaths('../node_modules/@tensorflow/tfjs-backend-wasm/dist/'); // await faceapi.tf?.setWasmPaths(`https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${faceapi.tf.version_core}/dist/`);
// await faceapi.tf.setBackend('wasm'); // await faceapi.tf?.setBackend('wasm');
// log(`WASM SIMD: ${await faceapi.tf?.env().getAsync('WASM_HAS_SIMD_SUPPORT')} Threads: ${await faceapi.tf?.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT') ? 'Multi' : 'Single'}`);
// default is webgl backend // default is webgl backend
await faceapi.tf.setBackend('webgl'); await faceapi.tf.setBackend('webgl');
await faceapi.tf.ready();
// tfjs optimizations
if (faceapi.tf?.env().flagRegistry.CANVAS2D_WILL_READ_FREQUENTLY) faceapi.tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
await faceapi.tf.enableProdMode(); await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready(); await faceapi.tf.ready();
// check version // check version
log(`Version: FaceAPI ${str(faceapi?.version.faceapi || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`); log(`Version: FaceAPI ${str(faceapi?.version || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`);
log(`Flags: ${JSON.stringify(faceapi?.tf?.ENV.flags || { tf: 'not loaded' })}`); log(`Flags: ${JSON.stringify(faceapi?.tf?.ENV.flags || { tf: 'not loaded' })}`);
// load face-api models // load face-api models
@ -140,16 +143,9 @@ async function main() {
const engine = await faceapi.tf.engine(); const engine = await faceapi.tf.engine();
log(`TF Engine State: ${str(engine.state)}`); log(`TF Engine State: ${str(engine.state)}`);
// const testT = faceapi.tf.tensor([0]);
// const testF = testT.toFloat();
// console.log(testT.print(), testF.print());
// testT.dispose();
// testF.dispose();
// loop through all images and try to process them // loop through all images and try to process them
log(`Start processing: ${samples.length} images ...<br>`); log(`Start processing: ${samples.length} images ...<br>`);
for (const img of samples) { for (const img of samples) {
// new line
document.body.appendChild(document.createElement('br')); document.body.appendChild(document.createElement('br'));
// load and resize image // load and resize image
const canvas = await image(img); const canvas = await image(img);
@ -163,7 +159,7 @@ async function main() {
.withFaceDescriptors() .withFaceDescriptors()
.withAgeAndGender(); .withAgeAndGender();
// print results to screen // print results to screen
print('TinyFace Detector', img, dataTinyYolo); print('TinyFace:', img, dataTinyYolo);
// actual model execution // actual model execution
const dataSSDMobileNet = await faceapi const dataSSDMobileNet = await faceapi
.detectAllFaces(canvas, optionsSSDMobileNet) .detectAllFaces(canvas, optionsSSDMobileNet)
@ -172,11 +168,9 @@ async function main() {
.withFaceDescriptors() .withFaceDescriptors()
.withAgeAndGender(); .withAgeAndGender();
// print results to screen // print results to screen
print('SSD MobileNet', img, dataSSDMobileNet); print('SSDMobileNet:', img, dataSSDMobileNet);
} catch (err) { } catch (err) {
log(`Image: ${img} Error during processing ${str(err)}`); log(`Image: ${img} Error during processing ${str(err)}`);
// eslint-disable-next-line no-console
console.error(err);
} }
} }
} }

View File

@ -1,15 +1,20 @@
// @ts-nocheck /**
* FaceAPI Demo for NodeJS
* - Uses external library [canvas](https://www.npmjs.com/package/canvas) to decode image
* - Loads image from provided param
* - Outputs results to console
*/
// canvas library provides full canvas (load/draw/write) functionality for nodejs
// must be installed manually as it just a demo dependency and not actual face-api dependency
const canvas = require('canvas'); // eslint-disable-line node/no-missing-require
const fs = require('fs'); const fs = require('fs');
const process = require('process');
const path = require('path'); const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require const process = require('process');
const log = require('@vladmandic/pilogger'); const log = require('@vladmandic/pilogger');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require, no-unused-vars const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
const tf = require('@tensorflow/tfjs-node'); const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require // const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
const canvas = require('canvas');
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
const modelPathRoot = '../model'; const modelPathRoot = '../model';
const imgPathRoot = './demo'; // modify to include your sample images const imgPathRoot = './demo'; // modify to include your sample images
@ -52,11 +57,9 @@ async function main() {
faceapi.env.monkeyPatch({ Canvas: canvas.Canvas, Image: canvas.Image, ImageData: canvas.ImageData }); faceapi.env.monkeyPatch({ Canvas: canvas.Canvas, Image: canvas.Image, ImageData: canvas.ImageData });
await faceapi.tf.setBackend('tensorflow'); await faceapi.tf.setBackend('tensorflow');
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready(); await faceapi.tf.ready();
log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf?.getBackend()}`); log.state(`Version: FaceAPI ${faceapi.version} TensorFlow/JS ${tf.version_core} Backend: ${faceapi.tf?.getBackend()}`);
log.info('Loading FaceAPI models'); log.info('Loading FaceAPI models');
const modelPath = path.join(__dirname, modelPathRoot); const modelPath = path.join(__dirname, modelPathRoot);
@ -80,7 +83,7 @@ async function main() {
for (const face of result) print(face); for (const face of result) print(face);
} }
const t1 = process.hrtime.bigint(); const t1 = process.hrtime.bigint();
log.info('Processed', numImages, 'images in', Math.trunc(parseInt(t1 - t0) / 1000 / 1000), 'ms'); log.info('Processed', numImages, 'images in', Math.trunc(Number((t1 - t0).toString()) / 1000 / 1000), 'ms');
} else { } else {
const param = process.argv[2]; const param = process.argv[2];
if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) { if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {

35
demo/node-face-compare.js Normal file
View File

@ -0,0 +1,35 @@
/**
 * FaceAPI demo that loads two images and computes the similarity between the most prominent face found in each image
*/
const fs = require('fs');
const tf = require('@tensorflow/tfjs-node');
const faceapi = require('../dist/face-api.node');
let optionsSSDMobileNet;
// Detect faces in an image file and return one descriptor per detected face.
const getDescriptors = async (imageFile) => {
  const imageBuffer = fs.readFileSync(imageFile); // read raw image bytes from disk
  const imageTensor = tf.node.decodeImage(imageBuffer, 3); // decode to a rgb tensor
  const detections = await faceapi
    .detectAllFaces(imageTensor, optionsSSDMobileNet)
    .withFaceLandmarks()
    .withFaceDescriptors();
  tf.dispose(imageTensor); // release tensor memory once detection is done
  return detections.map((detection) => detection.descriptor);
};
/**
 * Loads two images, computes a face descriptor for the most prominent face in
 * each, and prints the euclidean distance and similarity between them.
 * Exits early with an error message if either image contains no detectable face.
 */
const main = async (file1, file2) => {
  console.log('input images:', file1, file2); // eslint-disable-line no-console
  await tf.ready();
  await faceapi.nets.ssdMobilenetv1.loadFromDisk('model');
  optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.5, maxResults: 1 });
  await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
  await faceapi.nets.faceRecognitionNet.loadFromDisk('model');
  const desc1 = await getDescriptors(file1);
  const desc2 = await getDescriptors(file2);
  if (desc1.length === 0 || desc2.length === 0) {
    // guard: euclideanDistance would throw an opaque error on undefined descriptors
    console.error('no face detected:', { file1, faces1: desc1.length, file2, faces2: desc2.length }); // eslint-disable-line no-console
    return;
  }
  const distance = faceapi.euclideanDistance(desc1[0], desc2[0]); // only compare first found face in each image
  console.log('distance between most prominant detected faces:', distance); // eslint-disable-line no-console
  console.log('similarity between most prominant detected faces:', 1 - distance); // eslint-disable-line no-console
};
main('demo/sample1.jpg', 'demo/sample2.jpg');

View File

@ -1,31 +1,54 @@
/**
* FaceAPI Demo for NodeJS
* - Uses external library [@canvas/image](https://www.npmjs.com/package/@canvas/image) to decode image
* - Loads image from provided param
* - Outputs results to console
*/
// @canvas/image can decode jpeg, png, webp
// must be installed manually as it just a demo dependency and not actual face-api dependency
const image = require('@canvas/image'); // eslint-disable-line node/no-missing-require
const fs = require('fs'); const fs = require('fs');
const tf = require('@tensorflow/tfjs-node'); const log = require('@vladmandic/pilogger');
const image = require('@canvas/image'); // @canvas/image can decode jpeg, png, webp const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi' const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
const modelPath = 'model/'; const modelPath = 'model/';
const imageFile = 'demo/sample1.jpg'; const imageFile = 'demo/sample1.jpg';
const ssdOptions = { minConfidence: 0.1, maxResults: 10 }; const ssdOptions = { minConfidence: 0.1, maxResults: 10 };
async function main() { async function main() {
log.header();
const buffer = fs.readFileSync(imageFile); // read image from disk const buffer = fs.readFileSync(imageFile); // read image from disk
const canvas = await image.imageFromBuffer(buffer); // decode to canvas const canvas = await image.imageFromBuffer(buffer); // decode to canvas
const imageData = image.getImageData(canvas); // read decoded image data from canvas const imageData = image.getImageData(canvas); // read decoded image data from canvas
console.log('image:', imageFile, canvas.width, canvas.height); log.info('image:', imageFile, canvas.width, canvas.height);
const tensor = tf.tidy(() => { // create tensor from image data const tensor = tf.tidy(() => { // create tensor from image data
const data = tf.tensor(Array.from(imageData.data), [canvas.height, canvas.width, 4], 'int32'); // create rgba image tensor from flat array and flip to height x width const data = tf.tensor(Array.from(imageData?.data || []), [canvas.height, canvas.width, 4], 'int32'); // create rgba image tensor from flat array and flip to height x width
const channels = tf.split(data, 4, 2); // split rgba to channels const channels = tf.split(data, 4, 2); // split rgba to channels
const rgb = tf.stack([channels[0], channels[1], channels[2]], 2); // stack channels back to rgb const rgb = tf.stack([channels[0], channels[1], channels[2]], 2); // stack channels back to rgb
const reshape = tf.reshape(rgb, [1, canvas.height, canvas.width, 3]); // move extra dim from the end of tensor and use it as batch number instead const reshape = tf.reshape(rgb, [1, canvas.height, canvas.width, 3]); // move extra dim from the end of tensor and use it as batch number instead
return reshape; return reshape;
}); });
console.log('tensor:', tensor.shape, tensor.size); log.info('tensor:', tensor.shape, tensor.size);
// load models
await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
await faceapi.nets.ageGenderNet.loadFromDisk(modelPath);
await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath); // load basic model only
const optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options(ssdOptions); // create options object const optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options(ssdOptions); // create options object
const result = await faceapi.detectAllFaces(tensor, optionsSSDMobileNet); // run detection const result = await faceapi // run detection
console.log('results:', result); .detectAllFaces(tensor, optionsSSDMobileNet)
.withFaceLandmarks()
.withFaceExpressions()
.withFaceDescriptors()
.withAgeAndGender();
log.data('results:', result.length);
} }
main(); main();

84
demo/node-match.js Normal file
View File

@ -0,0 +1,84 @@
/**
* FaceAPI Demo for NodeJS
* - Analyzes face descriptors from source (image file or folder containing multiple image files)
* - Analyzes face descriptor from target
* - Finds best match
*/
const fs = require('fs');
const path = require('path');
const log = require('@vladmandic/pilogger');
const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
let optionsSSDMobileNet;
const minConfidence = 0.1;
const distanceThreshold = 0.5;
const modelPath = 'model';
const labeledFaceDescriptors = [];
// Load all required models from disk (sequentially, in dependency order)
// and prepare the shared SSD MobileNet detector options.
async function initFaceAPI() {
  const requiredModels = [
    faceapi.nets.ssdMobilenetv1,
    faceapi.nets.faceLandmark68Net,
    faceapi.nets.faceExpressionNet,
    faceapi.nets.faceRecognitionNet,
  ];
  for (const model of requiredModels) await model.loadFromDisk(modelPath);
  optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults: 1 });
}
// Run full face analysis on an image file and return the descriptor of every detected face.
async function getDescriptors(imageFile) {
  const imageBuffer = fs.readFileSync(imageFile); // raw image bytes
  const imageTensor = tf.node.decodeImage(imageBuffer, 3); // decode to rgb tensor
  const detections = await faceapi
    .detectAllFaces(imageTensor, optionsSSDMobileNet)
    .withFaceLandmarks()
    .withFaceExpressions()
    .withFaceDescriptors();
  tf.dispose(imageTensor); // free tensor memory
  return detections.map((detection) => detection.descriptor);
}
/**
 * Computes face descriptors for an image file and registers them in the
 * in-memory labeled-descriptor list used for matching.
 * Silently skips files whose extension is not a supported image format.
 */
async function registerImage(inputFile) {
  // use a real extension check instead of bare endsWith; previously '.jpeg' files were skipped
  const ext = path.extname(inputFile).toLowerCase();
  if (!['.jpg', '.jpeg', '.png', '.gif'].includes(ext)) return;
  log.data('Registered:', inputFile);
  const descriptors = await getDescriptors(inputFile);
  for (const descriptor of descriptors) {
    const labeledFaceDescriptor = new faceapi.LabeledFaceDescriptors(inputFile, [descriptor]);
    labeledFaceDescriptors.push(labeledFaceDescriptor);
  }
}
// Match every face found in inputFile against all registered descriptors
// and return the best match for each, in detection order.
async function findBestMatch(inputFile) {
  const matcher = new faceapi.FaceMatcher(labeledFaceDescriptors, distanceThreshold);
  const descriptors = await getDescriptors(inputFile);
  return Promise.all(descriptors.map((descriptor) => matcher.findBestMatch(descriptor)));
}
// Entry point: registers faces from the source image or folder, then finds
// the best match for the target image. Usage: <source image or folder> <target image>
async function main() {
  log.header();
  if (process.argv.length !== 4) {
    log.error(process.argv[1], 'Expected <source image or folder> <target image>');
    process.exit(1);
  }
  const [, , sourceArg, targetArg] = process.argv;
  await initFaceAPI();
  log.info('Input:', sourceArg);
  const sourceStat = fs.statSync(sourceArg);
  if (sourceStat.isFile()) {
    await registerImage(sourceArg); // register image
  } else if (sourceStat.isDirectory()) {
    const entries = fs.readdirSync(sourceArg);
    for (const entry of entries) await registerImage(path.join(sourceArg, entry)); // register all images in a folder
  }
  log.info('Comparing:', targetArg, 'Descriptors:', labeledFaceDescriptors.length);
  if (labeledFaceDescriptors.length === 0) {
    log.warn('No registered faces');
    return;
  }
  const bestMatch = await findBestMatch(targetArg); // find best match to all registered images
  log.data('Match:', bestMatch);
}
main();

View File

@ -1,14 +1,16 @@
// @ts-nocheck /**
* FaceAPI Demo for NodeJS
* - Used by `node-multiprocess.js`
*/
const fs = require('fs'); const fs = require('fs');
const path = require('path'); const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger'); const log = require('@vladmandic/pilogger');
// workers actual import tfjs and faceapi modules // workers actual import tfjs and faceapi modules
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
const tf = require('@tensorflow/tfjs-node'); const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi' // const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
// options used by faceapi // options used by faceapi
const modelPathRoot = '../model'; const modelPathRoot = '../model';
@ -53,7 +55,7 @@ async function main() {
await faceapi.tf.enableProdMode(); await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false); await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready(); await faceapi.tf.ready();
log.state('Worker: PID:', process.pid, `TensorFlow/JS ${faceapi.tf.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf.getBackend()}`); log.state('Worker: PID:', process.pid, `TensorFlow/JS ${faceapi.tf.version_core} FaceAPI ${faceapi.version} Backend: ${faceapi.tf.getBackend()}`);
// and load and initialize facepi models // and load and initialize facepi models
const modelPath = path.join(__dirname, modelPathRoot); const modelPath = path.join(__dirname, modelPathRoot);

View File

@ -1,11 +1,14 @@
// @ts-nocheck /**
* FaceAPI Demo for NodeJS
* - Starts multiple worker processes and uses them as worker pool to process all input images
* - Images are enumerated in main process and sent for processing to worker processes via ipc
*/
const fs = require('fs'); const fs = require('fs');
const path = require('path'); const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger'); // this is my simple logger with few extra features const log = require('@vladmandic/pilogger'); // this is my simple logger with few extra features
const child_process = require('child_process'); const child_process = require('child_process');
// note that main process import faceapi or tfjs at all // note that main process does not need to import faceapi or tfjs at all as processing is done in a worker process
const imgPathRoot = './demo'; // modify to include your sample images const imgPathRoot = './demo'; // modify to include your sample images
const numWorkers = 4; // how many workers will be started const numWorkers = 4; // how many workers will be started
@ -33,14 +36,14 @@ function waitCompletion() {
if (activeWorkers > 0) setImmediate(() => waitCompletion()); if (activeWorkers > 0) setImmediate(() => waitCompletion());
else { else {
t[1] = process.hrtime.bigint(); t[1] = process.hrtime.bigint();
log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(parseInt(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(parseInt(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(parseInt(t[1] - t[2]) / numImages / 1000000), 'ms'); log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(Number(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(Number(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(Number(t[1] - t[2]) / numImages / 1000000), 'ms');
} }
} }
function measureLatency() { function measureLatency() {
t[3] = process.hrtime.bigint(); t[3] = process.hrtime.bigint();
const latencyInitialization = Math.trunc(parseInt(t[2] - t[0]) / 1000 / 1000); const latencyInitialization = Math.trunc(Number(t[2] - t[0]) / 1000 / 1000);
const latencyRoundTrip = Math.trunc(parseInt(t[3] - t[2]) / 1000 / 1000); const latencyRoundTrip = Math.trunc(Number(t[3] - t[2]) / 1000 / 1000);
log.info('Latency: worker initializtion: ', latencyInitialization, 'message round trip:', latencyRoundTrip); log.info('Latency: worker initializtion: ', latencyInitialization, 'message round trip:', latencyRoundTrip);
} }

31
demo/node-simple.js Normal file
View File

@ -0,0 +1,31 @@
/**
* FaceAPI Demo for NodeJS
* - Loads image
* - Outputs results to console
*/
const fs = require('fs');
const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
// const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
/**
 * Loads all FaceAPI models, runs full face analysis on the bundled sample
 * image, and prints the raw result object to the console.
 */
async function main() {
  // load models from a specific path, in the same order as before
  const requiredNets = ['ssdMobilenetv1', 'faceLandmark68Net', 'ageGenderNet', 'faceRecognitionNet', 'faceExpressionNet'];
  for (const net of requiredNets) await faceapi.nets[net].loadFromDisk('model');
  const detectorOptions = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.1, maxResults: 10 }); // set model options
  const imageBuffer = fs.readFileSync('demo/sample1.jpg'); // load jpg image as binary
  const decodedT = faceapi.tf.node.decodeImage(imageBuffer, 3); // decode binary buffer to rgb tensor
  const batchedT = faceapi.tf.expandDims(decodedT, 0); // add batch dimension to tensor
  const result = await faceapi.detectAllFaces(batchedT, detectorOptions) // run detection
    .withFaceLandmarks()
    .withFaceExpressions()
    .withFaceDescriptors()
    .withAgeAndGender();
  faceapi.tf.dispose([decodedT, batchedT]); // dispose tensors to avoid memory leaks
  console.log({ result }); // eslint-disable-line no-console
}
main();

View File

@ -1,97 +1,53 @@
// @ts-nocheck /**
* FaceAPI Demo for NodeJS using WASM
* - Loads WASM binaries from external CDN
* - Loads image
* - Outputs results to console
*/
const fs = require('fs'); const fs = require('fs');
const process = require('process'); const image = require('@canvas/image'); // eslint-disable-line node/no-missing-require
const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require, no-unused-vars
const tf = require('@tensorflow/tfjs'); const tf = require('@tensorflow/tfjs');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require const wasm = require('@tensorflow/tfjs-backend-wasm');
require('@tensorflow/tfjs-backend-wasm'); const faceapi = require('../dist/face-api.node-wasm.js'); // use this when using face-api in dev mode
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require, no-unused-vars
const canvas = require('canvas');
const faceapi = require('../dist/face-api.node-cpu.js'); // this is equivalent to '@vladmandic/faceapi'
const modelPathRoot = '../model'; async function readImage(imageFile) {
const imgPathRoot = './demo'; // modify to include your sample images const buffer = fs.readFileSync(imageFile); // read image from disk
const minConfidence = 0.15; const canvas = await image.imageFromBuffer(buffer); // decode to canvas
const maxResults = 5; const imageData = image.getImageData(canvas); // read decoded image data from canvas
let optionsSSDMobileNet; const tensor = tf.tidy(() => { // create tensor from image data
const data = tf.tensor(Array.from(imageData?.data || []), [canvas.height, canvas.width, 4], 'int32'); // create rgba image tensor from flat array and flip to height x width
async function image(input) { const channels = tf.split(data, 4, 2); // split rgba to channels
const img = await canvas.loadImage(input); const rgb = tf.stack([channels[0], channels[1], channels[2]], 2); // stack channels back to rgb
const c = canvas.createCanvas(img.width, img.height); const squeeze = tf.squeeze(rgb); // move extra dim from the end of tensor and use it as batch number instead
const ctx = c.getContext('2d'); return squeeze;
ctx.drawImage(img, 0, 0, img.width, img.height); });
// const out = fs.createWriteStream('test.jpg'); console.log(`Image: ${imageFile} [${canvas.width} x ${canvas.height} Tensor: ${tensor.shape}, Size: ${tensor.size}`); // eslint-disable-line no-console
// const stream = c.createJPEGStream({ quality: 0.6, progressive: true, chromaSubsampling: true }); return tensor;
// stream.pipe(out);
return c;
} }
async function detect(tensor) { async function main() {
const result = await faceapi wasm.setWasmPaths('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/', true);
.detectAllFaces(tensor, optionsSSDMobileNet) await tf.setBackend('wasm');
await tf.ready();
console.log(`Version: FaceAPI ${faceapi.version} TensorFlow/JS ${tf.version_core} Backend: ${faceapi.tf.getBackend()}`); // eslint-disable-line no-console
await faceapi.nets.ssdMobilenetv1.loadFromDisk('model'); // load models from a specific patch
await faceapi.nets.faceLandmark68Net.loadFromDisk('model');
await faceapi.nets.ageGenderNet.loadFromDisk('model');
await faceapi.nets.faceRecognitionNet.loadFromDisk('model');
await faceapi.nets.faceExpressionNet.loadFromDisk('model');
const options = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.1, maxResults: 10 }); // set model options
const tensor = await readImage('demo/sample1.jpg');
const t0 = performance.now();
const result = await faceapi.detectAllFaces(tensor, options) // run detection
.withFaceLandmarks() .withFaceLandmarks()
.withFaceExpressions() .withFaceExpressions()
.withFaceDescriptors() .withFaceDescriptors()
.withAgeAndGender(); .withAgeAndGender();
return result; tf.dispose(tensor); // dispose tensors to avoid memory leaks
} const t1 = performance.now();
console.log('Time', t1 - t0); // eslint-disable-line no-console
function print(face) { console.log('Result', result); // eslint-disable-line no-console
const expression = Object.entries(face.expressions).reduce((acc, val) => ((val[1] > acc[1]) ? val : acc), ['', 0]);
const box = [face.alignedRect._box._x, face.alignedRect._box._y, face.alignedRect._box._width, face.alignedRect._box._height];
const gender = `Gender: ${Math.round(100 * face.genderProbability)}% ${face.gender}`;
log.data(`Detection confidence: ${Math.round(100 * face.detection._score)}% ${gender} Age: ${Math.round(10 * face.age) / 10} Expression: ${Math.round(100 * expression[1])}% ${expression[0]} Box: ${box.map((a) => Math.round(a))}`);
}
async function main() {
log.header();
log.info('FaceAPI single-process test');
faceapi.env.monkeyPatch({ Canvas: canvas.Canvas, Image: canvas.Image, ImageData: canvas.ImageData });
await faceapi.tf.setBackend('wasm');
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready();
log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf?.getBackend()}`);
log.info('Loading FaceAPI models');
const modelPath = path.join(__dirname, modelPathRoot);
await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
await faceapi.nets.ageGenderNet.loadFromDisk(modelPath);
await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults });
if (process.argv.length !== 3) {
const t0 = process.hrtime.bigint();
const dir = fs.readdirSync(imgPathRoot);
let numImages = 0;
for (const img of dir) {
if (!img.toLocaleLowerCase().endsWith('.jpg')) continue;
numImages += 1;
const c = await image(path.join(imgPathRoot, img));
const result = await detect(c);
log.data('Image:', img, 'Detected faces:', result.length);
for (const face of result) print(face);
}
const t1 = process.hrtime.bigint();
log.info('Processed', numImages, 'images in', Math.trunc(parseInt(t1 - t0) / 1000 / 1000), 'ms');
} else {
const param = process.argv[2];
if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {
const c = await image(param);
const result = await detect(c);
log.data('Image:', param, 'Detected faces:', result.length);
for (const face of result) print(face);
}
}
} }
main(); main();

View File

@ -1,21 +1,25 @@
// @ts-nocheck /**
* FaceAPI Demo for NodeJS
* - Uses external library [node-fetch](https://www.npmjs.com/package/node-fetch) to load images via http
* - Loads image from provided param
* - Outputs results to console
*/
const fs = require('fs'); const fs = require('fs');
const process = require('process'); const process = require('process');
const path = require('path'); const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger'); const log = require('@vladmandic/pilogger');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const fetch = require('node-fetch').default; const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before face-api
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require const faceapi = require('../dist/face-api.node.js'); // use this when using face-api in dev mode
const tf = require('@tensorflow/tfjs-node'); // const faceapi = require('@vladmandic/face-api'); // use this when face-api is installed as module (majority of use cases)
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'
const modelPathRoot = '../model'; const modelPathRoot = '../model';
const imgPathRoot = './demo'; // modify to include your sample images const imgPathRoot = './demo'; // modify to include your sample images
const minConfidence = 0.15; const minConfidence = 0.15;
const maxResults = 5; const maxResults = 5;
let optionsSSDMobileNet; let optionsSSDMobileNet;
let fetch; // dynamically imported later
async function image(input) { async function image(input) {
// read input image file and create tensor to be used for processing // read input image file and create tensor to be used for processing
@ -63,7 +67,7 @@ async function detect(tensor) {
} }
} }
// eslint-disable-next-line no-unused-vars // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
function detectPromise(tensor) { function detectPromise(tensor) {
return new Promise((resolve) => faceapi return new Promise((resolve) => faceapi
.detectAllFaces(tensor, optionsSSDMobileNet) .detectAllFaces(tensor, optionsSSDMobileNet)
@ -89,12 +93,13 @@ async function main() {
log.header(); log.header();
log.info('FaceAPI single-process test'); log.info('FaceAPI single-process test');
// eslint-disable-next-line node/no-extraneous-import
fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-missing-import
await faceapi.tf.setBackend('tensorflow'); await faceapi.tf.setBackend('tensorflow');
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready(); await faceapi.tf.ready();
log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf?.getBackend()}`); log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version} Backend: ${faceapi.tf?.getBackend()}`);
log.info('Loading FaceAPI models'); log.info('Loading FaceAPI models');
const modelPath = path.join(__dirname, modelPathRoot); const modelPath = path.join(__dirname, modelPathRoot);
@ -105,7 +110,7 @@ async function main() {
await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath); await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults }); optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence, maxResults });
if (process.argv.length !== 3) { if (process.argv.length !== 4) {
const t0 = process.hrtime.bigint(); const t0 = process.hrtime.bigint();
const dir = fs.readdirSync(imgPathRoot); const dir = fs.readdirSync(imgPathRoot);
for (const img of dir) { for (const img of dir) {
@ -117,7 +122,7 @@ async function main() {
tensor.dispose(); tensor.dispose();
} }
const t1 = process.hrtime.bigint(); const t1 = process.hrtime.bigint();
log.info('Processed', dir.length, 'images in', Math.trunc(parseInt(t1 - t0) / 1000 / 1000), 'ms'); log.info('Processed', dir.length, 'images in', Math.trunc(Number((t1 - t0)) / 1000 / 1000), 'ms');
} else { } else {
const param = process.argv[2]; const param = process.argv[2];
if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) { if (fs.existsSync(param) || param.startsWith('http:') || param.startsWith('https:')) {

View File

@ -1,8 +1,14 @@
import * as faceapi from '../dist/face-api.esm.js'; /**
* FaceAPI Demo for Browsers
* Loaded via `webcam.html`
*/
import * as faceapi from '../dist/face-api.esm.js'; // use when in dev mode
// import * as faceapi from '@vladmandic/face-api'; // use when downloading face-api as npm
// configuration options // configuration options
const modelPath = '../model/'; // path to model folder that will be loaded using http const modelPath = '../model/'; // path to model folder that will be loaded using http
// const modelPath = 'https://vladmandic.github.io/face-api/model/'; // path to model folder that will be loaded using http // const modelPath = 'https://cdn.jsdelivr.net/npm/@vladmandic/face-api/model/'; // path to model folder that will be loaded using http
const minScore = 0.2; // minimum score const minScore = 0.2; // minimum score
const maxResults = 5; // maximum number of results to return const maxResults = 5; // maximum number of results to return
let optionsSSDMobileNet; let optionsSSDMobileNet;
@ -17,15 +23,14 @@ function str(json) {
// helper function to print strings to html document as a log // helper function to print strings to html document as a log
function log(...txt) { function log(...txt) {
// eslint-disable-next-line no-console console.log(...txt); // eslint-disable-line no-console
console.log(...txt);
const div = document.getElementById('log'); const div = document.getElementById('log');
if (div) div.innerHTML += `<br>${txt}`; if (div) div.innerHTML += `<br>${txt}`;
} }
// helper function to draw detected faces // helper function to draw detected faces
function drawFaces(canvas, data, fps) { function drawFaces(canvas, data, fps) {
const ctx = canvas.getContext('2d'); const ctx = canvas.getContext('2d', { willReadFrequently: true });
if (!ctx) return; if (!ctx) return;
ctx.clearRect(0, 0, canvas.width, canvas.height); ctx.clearRect(0, 0, canvas.width, canvas.height);
// draw title // draw title
@ -42,18 +47,18 @@ function drawFaces(canvas, data, fps) {
ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height); ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height);
ctx.stroke(); ctx.stroke();
ctx.globalAlpha = 1; ctx.globalAlpha = 1;
// const expression = person.expressions.sort((a, b) => Object.values(a)[0] - Object.values(b)[0]); // draw text labels
const expression = Object.entries(person.expressions).sort((a, b) => b[1] - a[1]); const expression = Object.entries(person.expressions).sort((a, b) => b[1] - a[1]);
ctx.fillStyle = 'black'; ctx.fillStyle = 'black';
ctx.fillText(`gender: ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 59); ctx.fillText(`gender: ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 59);
ctx.fillText(`expression: ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 41); ctx.fillText(`expression: ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 41);
ctx.fillText(`age: ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 23); ctx.fillText(`age: ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 23);
ctx.fillText(`roll:${person.angle.roll.toFixed(3)} pitch:${person.angle.pitch.toFixed(3)} yaw:${person.angle.yaw.toFixed(3)}`, person.detection.box.x, person.detection.box.y - 5); ctx.fillText(`roll:${person.angle.roll}° pitch:${person.angle.pitch}° yaw:${person.angle.yaw}°`, person.detection.box.x, person.detection.box.y - 5);
ctx.fillStyle = 'lightblue'; ctx.fillStyle = 'lightblue';
ctx.fillText(`gender: ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 60); ctx.fillText(`gender: ${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 60);
ctx.fillText(`expression: ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 42); ctx.fillText(`expression: ${Math.round(100 * expression[0][1])}% ${expression[0][0]}`, person.detection.box.x, person.detection.box.y - 42);
ctx.fillText(`age: ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 24); ctx.fillText(`age: ${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 24);
ctx.fillText(`roll:${person.angle.roll.toFixed(3)} pitch:${person.angle.pitch.toFixed(3)} yaw:${person.angle.yaw.toFixed(3)}`, person.detection.box.x, person.detection.box.y - 6); ctx.fillText(`roll:${person.angle.roll}° pitch:${person.angle.pitch}° yaw:${person.angle.yaw}°`, person.detection.box.x, person.detection.box.y - 6);
// draw face points for each face // draw face points for each face
ctx.globalAlpha = 0.8; ctx.globalAlpha = 0.8;
ctx.fillStyle = 'lightblue'; ctx.fillStyle = 'lightblue';
@ -61,7 +66,6 @@ function drawFaces(canvas, data, fps) {
for (let i = 0; i < person.landmarks.positions.length; i++) { for (let i = 0; i < person.landmarks.positions.length; i++) {
ctx.beginPath(); ctx.beginPath();
ctx.arc(person.landmarks.positions[i].x, person.landmarks.positions[i].y, pointSize, 0, 2 * Math.PI); ctx.arc(person.landmarks.positions[i].x, person.landmarks.positions[i].y, pointSize, 0, 2 * Math.PI);
// ctx.fillText(`${i}`, person.landmarks.positions[i].x + 4, person.landmarks.positions[i].y + 4);
ctx.fill(); ctx.fill();
} }
} }
@ -95,7 +99,6 @@ async function setupCamera() {
const canvas = document.getElementById('canvas'); const canvas = document.getElementById('canvas');
if (!video || !canvas) return null; if (!video || !canvas) return null;
let msg = '';
log('Setting up camera'); log('Setting up camera');
// setup webcam. note that navigator.mediaDevices requires that page is accessed via https // setup webcam. note that navigator.mediaDevices requires that page is accessed via https
if (!navigator.mediaDevices) { if (!navigator.mediaDevices) {
@ -103,23 +106,19 @@ async function setupCamera() {
return null; return null;
} }
let stream; let stream;
const constraints = { const constraints = { audio: false, video: { facingMode: 'user', resizeMode: 'crop-and-scale' } };
audio: false,
video: { facingMode: 'user', resizeMode: 'crop-and-scale' },
};
if (window.innerWidth > window.innerHeight) constraints.video.width = { ideal: window.innerWidth }; if (window.innerWidth > window.innerHeight) constraints.video.width = { ideal: window.innerWidth };
else constraints.video.height = { ideal: window.innerHeight }; else constraints.video.height = { ideal: window.innerHeight };
try { try {
stream = await navigator.mediaDevices.getUserMedia(constraints); stream = await navigator.mediaDevices.getUserMedia(constraints);
} catch (err) { } catch (err) {
if (err.name === 'PermissionDeniedError' || err.name === 'NotAllowedError') msg = 'camera permission denied'; if (err.name === 'PermissionDeniedError' || err.name === 'NotAllowedError') log(`Camera Error: camera permission denied: ${err.message || err}`);
else if (err.name === 'SourceUnavailableError') msg = 'camera not available'; if (err.name === 'SourceUnavailableError') log(`Camera Error: camera not available: ${err.message || err}`);
log(`Camera Error: ${msg}: ${err.message || err}`);
return null; return null;
} }
// @ts-ignore if (stream) {
if (stream) video.srcObject = stream; video.srcObject = stream;
else { } else {
log('Camera Error: stream empty'); log('Camera Error: stream empty');
return null; return null;
} }
@ -128,31 +127,23 @@ async function setupCamera() {
if (settings.deviceId) delete settings.deviceId; if (settings.deviceId) delete settings.deviceId;
if (settings.groupId) delete settings.groupId; if (settings.groupId) delete settings.groupId;
if (settings.aspectRatio) settings.aspectRatio = Math.trunc(100 * settings.aspectRatio) / 100; if (settings.aspectRatio) settings.aspectRatio = Math.trunc(100 * settings.aspectRatio) / 100;
log(`Camera active: ${track.label}`); // ${str(constraints)} log(`Camera active: ${track.label}`);
log(`Camera settings: ${str(settings)}`); log(`Camera settings: ${str(settings)}`);
canvas.addEventListener('click', () => { canvas.addEventListener('click', () => {
// @ts-ignore
if (video && video.readyState >= 2) { if (video && video.readyState >= 2) {
// @ts-ignore
if (video.paused) { if (video.paused) {
// @ts-ignore
video.play(); video.play();
detectVideo(video, canvas); detectVideo(video, canvas);
} else { } else {
// @ts-ignore
video.pause(); video.pause();
} }
} }
// @ts-ignore
log(`Camera state: ${video.paused ? 'paused' : 'playing'}`); log(`Camera state: ${video.paused ? 'paused' : 'playing'}`);
}); });
return new Promise((resolve) => { return new Promise((resolve) => {
video.onloadeddata = async () => { video.onloadeddata = async () => {
// @ts-ignore
canvas.width = video.videoWidth; canvas.width = video.videoWidth;
// @ts-ignore
canvas.height = video.videoHeight; canvas.height = video.videoHeight;
// @ts-ignore
video.play(); video.play();
detectVideo(video, canvas); detectVideo(video, canvas);
resolve(true); resolve(true);
@ -170,7 +161,6 @@ async function setupFaceAPI() {
await faceapi.nets.faceRecognitionNet.load(modelPath); await faceapi.nets.faceRecognitionNet.load(modelPath);
await faceapi.nets.faceExpressionNet.load(modelPath); await faceapi.nets.faceExpressionNet.load(modelPath);
optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: minScore, maxResults }); optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: minScore, maxResults });
// check tf engine state // check tf engine state
log(`Models loaded: ${str(faceapi.tf.engine().state.numTensors)} tensors`); log(`Models loaded: ${str(faceapi.tf.engine().state.numTensors)} tensors`);
} }
@ -180,19 +170,21 @@ async function main() {
log('FaceAPI WebCam Test'); log('FaceAPI WebCam Test');
// if you want to use wasm backend location for wasm binaries must be specified // if you want to use wasm backend location for wasm binaries must be specified
// await faceapi.tf.setWasmPaths('../node_modules/@tensorflow/tfjs-backend-wasm/dist/'); // await faceapi.tf?.setWasmPaths(`https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${faceapi.tf.version_core}/dist/`);
// await faceapi.tf.setBackend('wasm'); // await faceapi.tf?.setBackend('wasm');
// log(`WASM SIMD: ${await faceapi.tf?.env().getAsync('WASM_HAS_SIMD_SUPPORT')} Threads: ${await faceapi.tf?.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT') ? 'Multi' : 'Single'}`);
// default is webgl backend // default is webgl backend
await faceapi.tf.setBackend('webgl'); await faceapi.tf.setBackend('webgl');
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready(); await faceapi.tf.ready();
// tfjs optimizations
if (faceapi.tf?.env().flagRegistry.CANVAS2D_WILL_READ_FREQUENTLY) faceapi.tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
if (faceapi.tf?.env().flagRegistry.WEBGL_EXP_CONV) faceapi.tf.env().set('WEBGL_EXP_CONV', true);
// check version // check version
log(`Version: FaceAPI ${str(faceapi?.version.faceapi || '(not loaded)')} TensorFlow/JS ${str(faceapi?.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi?.tf?.getBackend() || '(not loaded)')}`); log(`Version: FaceAPI ${str(faceapi?.version || '(not loaded)')} TensorFlow/JS ${str(faceapi.tf?.version_core || '(not loaded)')} Backend: ${str(faceapi.tf?.getBackend() || '(not loaded)')}`);
// log(`Flags: ${JSON.stringify(faceapi?.tf?.ENV.flags || { tf: 'not loaded' })}`);
await setupFaceAPI(); await setupFaceAPI();
await setupCamera(); await setupCamera();

1
dist/face-api.d.ts vendored Normal file
View File

@ -0,0 +1 @@
export * from '../types/face-api';

1
dist/face-api.esm-nobundle.d.ts vendored Normal file
View File

@ -0,0 +1 @@
export * from '../types/face-api';

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

1
dist/face-api.esm.d.ts vendored Normal file
View File

@ -0,0 +1 @@
export * from '../types/face-api';

61487
dist/face-api.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

61494
dist/face-api.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

1
dist/face-api.node-gpu.d.ts vendored Normal file
View File

@ -0,0 +1 @@
export * from '../types/face-api';

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

1
dist/face-api.node-wasm.d.ts vendored Normal file
View File

@ -0,0 +1 @@
export * from '../types/face-api';

7
dist/face-api.node-wasm.js vendored Normal file

File diff suppressed because one or more lines are too long

1
dist/face-api.node.d.ts vendored Normal file
View File

@ -0,0 +1 @@
export * from '../types/face-api';

4664
dist/face-api.node.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

28
dist/tfjs.esm.d.ts vendored Normal file
View File

@ -0,0 +1,28 @@
/*
import '@tensorflow/tfjs-core';
import '@tensorflow/tfjs-core/dist/types';
import '@tensorflow/tfjs-core/dist/register_all_gradients';
import '@tensorflow/tfjs-core/dist/public/chained_ops/register_all_chained_ops';
import '@tensorflow/tfjs-data';
import '@tensorflow/tfjs-layers';
import '@tensorflow/tfjs-converter';
import '@tensorflow/tfjs-backend-cpu';
import '@tensorflow/tfjs-backend-webgl';
import '@tensorflow/tfjs-backend-wasm';
import '@tensorflow/tfjs-backend-webgpu';
*/
export declare const version: {
'tfjs-core': string;
'tfjs-backend-cpu': string;
'tfjs-backend-webgl': string;
'tfjs-data': string;
'tfjs-layers': string;
'tfjs-converter': string;
tfjs: string;
};
export { io, browser, image } from '@tensorflow/tfjs-core';
export { tensor, tidy, softmax, unstack, relu, add, conv2d, cast, zeros, concat, avgPool, stack, fill, transpose, tensor1d, tensor2d, tensor3d, tensor4d, maxPool, matMul, mul, sub, scalar } from '@tensorflow/tfjs-core';
export { div, pad, slice, reshape, slice3d, expandDims, depthwiseConv2d, separableConv2d, sigmoid, exp, tile, batchNorm, clipByValue } from '@tensorflow/tfjs-core';
export { ENV, Variable, Tensor, TensorLike, Rank, Tensor1D, Tensor2D, Tensor3D, Tensor4D, Tensor5D, NamedTensorMap } from '@tensorflow/tfjs-core';

59576
dist/tfjs.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

9
dist/tfjs.version.d.ts vendored Normal file
View File

@ -0,0 +1,9 @@
export declare const version: {
'tfjs-core': string;
'tfjs-backend-cpu': string;
'tfjs-backend-webgl': string;
'tfjs-data': string;
'tfjs-layers': string;
'tfjs-converter': string;
tfjs: string;
};

7
dist/tfjs.version.js vendored Normal file
View File

@ -0,0 +1,7 @@
/*
Face-API
homepage: <https://github.com/vladmandic/face-api>
author: <https://github.com/vladmandic>'
*/
var e="4.22.0";var s="4.22.0";var t="4.22.0";var n="4.22.0";var i="4.22.0";var w={tfjs:e,"tfjs-core":e,"tfjs-converter":s,"tfjs-backend-cpu":t,"tfjs-backend-webgl":n,"tfjs-backend-wasm":i};export{w as version};

View File

@ -1,12 +1,12 @@
{ {
"name": "@vladmandic/face-api", "name": "@vladmandic/face-api",
"version": "1.2.2", "version": "1.7.15",
"description": "FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS", "description": "FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS",
"sideEffects": false, "sideEffects": false,
"main": "dist/face-api.node.js", "main": "dist/face-api.node.js",
"module": "dist/face-api.esm.js", "module": "dist/face-api.esm.js",
"browser": "dist/face-api.esm.js", "browser": "dist/face-api.esm.js",
"types": "types/index.d.ts", "types": "types/face-api.d.ts",
"author": "Vladimir Mandic <mandic00@live.com>", "author": "Vladimir Mandic <mandic00@live.com>",
"bugs": { "bugs": {
"url": "https://github.com/vladmandic/face-api/issues" "url": "https://github.com/vladmandic/face-api/issues"
@ -14,18 +14,19 @@
"homepage": "https://vladmandic.github.io/face-api/demo/webcam.html", "homepage": "https://vladmandic.github.io/face-api/demo/webcam.html",
"license": "MIT", "license": "MIT",
"engines": { "engines": {
"node": ">=12.0.0" "node": ">=14.0.0"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
"url": "git+https://github.com/vladmandic/face-api.git" "url": "git+https://github.com/vladmandic/face-api.git"
}, },
"scripts": { "scripts": {
"start": "node --trace-warnings demo/node.js", "start": "node --no-warnings demo/node.js",
"dev": "node --trace-warnings server/serve.js", "build": "node build.js",
"build": "rimraf dist/* types/* typedoc/* && node server/build.js", "dev": "build --profile development",
"lint": "eslint src/**/* demo/*.js server/*.js", "lint": "eslint src/ demo/",
"test": "node --trace-warnings test/test-node.js" "test": "node --trace-warnings test/test-node.js",
"scan": "npx auditjs@latest ossi --dev --quiet"
}, },
"keywords": [ "keywords": [
"face-api", "face-api",
@ -41,31 +42,38 @@
"tfjs" "tfjs"
], ],
"devDependencies": { "devDependencies": {
"@canvas/image": "^1.0.1", "@canvas/image": "^2.0.0",
"@tensorflow/tfjs": "^3.6.0", "@microsoft/api-extractor": "^7.49.2",
"@tensorflow/tfjs-backend-wasm": "^3.6.0", "@tensorflow/tfjs": "^4.22.0",
"@tensorflow/tfjs-node": "^3.6.1", "@tensorflow/tfjs-backend-cpu": "^4.22.0",
"@tensorflow/tfjs-node-gpu": "^3.6.1", "@tensorflow/tfjs-backend-wasm": "^4.22.0",
"@types/node": "^15.0.1", "@tensorflow/tfjs-backend-webgl": "^4.22.0",
"@typescript-eslint/eslint-plugin": "^4.22.0", "@tensorflow/tfjs-backend-webgpu": "4.22.0",
"@typescript-eslint/parser": "^4.22.0", "@tensorflow/tfjs-converter": "^4.22.0",
"@vladmandic/pilogger": "^0.2.17", "@tensorflow/tfjs-core": "^4.22.0",
"canvas": "^2.7.0", "@tensorflow/tfjs-data": "^4.22.0",
"chokidar": "^3.5.1", "@tensorflow/tfjs-layers": "^4.22.0",
"dayjs": "^1.10.4", "@tensorflow/tfjs-node": "^4.22.0",
"esbuild": "^0.11.16", "@tensorflow/tfjs-node-gpu": "^4.22.0",
"eslint": "^7.25.0", "@types/node": "^22.13.1",
"eslint-config-airbnb-base": "^14.2.1", "@types/offscreencanvas": "^2019.7.3",
"eslint-plugin-import": "^2.22.1", "@typescript-eslint/eslint-plugin": "^8.5.0",
"eslint-plugin-json": "^2.1.2", "@typescript-eslint/parser": "^8.5.0",
"@vladmandic/build": "^0.10.2",
"@vladmandic/pilogger": "^0.5.1",
"ajv": "^8.17.1",
"esbuild": "^0.24.2",
"eslint": "8.57.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-plugin-import": "^2.30.0",
"eslint-plugin-json": "^4.0.1",
"eslint-plugin-node": "^11.1.0", "eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^5.1.0", "eslint-plugin-promise": "^7.1.0",
"node-fetch": "^2.6.1", "node-fetch": "^3.3.2",
"rimraf": "^3.0.2", "rimraf": "^6.0.1",
"seedrandom": "^3.0.5", "seedrandom": "^3.0.5",
"simple-git": "^2.38.0", "tslib": "^2.8.1",
"tslib": "^2.2.0", "typedoc": "^0.27.6",
"typedoc": "^0.20.36", "typescript": "5.7.3"
"typescript": "^4.2.4"
} }
} }

View File

@ -1,237 +0,0 @@
/* eslint-disable import/no-extraneous-dependencies */
/* eslint-disable node/no-unpublished-require */
const esbuild = require('esbuild');
const ts = require('typescript');
const log = require('@vladmandic/pilogger');
const TypeDoc = require('typedoc');
const changelog = require('./changelog');
const banner = { js: `
/*
Face-API
homepage: <https://github.com/vladmandic/face-api>
author: <https://github.com/vladmandic>'
*/
` };
let td = null;
// tsc configuration
const tsconfig = {
noEmitOnError: false,
target: ts.ScriptTarget.ES2018,
module: ts.ModuleKind.ES2020,
// outFile: "dist/face-api.d.ts",
outDir: 'types/',
declaration: true,
emitDeclarationOnly: true,
emitDecoratorMetadata: true,
experimentalDecorators: true,
skipLibCheck: true,
strictNullChecks: true,
baseUrl: './',
paths: {
tslib: ['node_modules/tslib/tslib.d.ts'],
},
};
// common configuration
const common = {
banner,
minifyWhitespace: false,
minifyIdentifiers: false,
minifySyntax: false,
bundle: true,
sourcemap: true,
metafile: true,
logLevel: 'error',
target: 'es2018',
// tsconfig: './tsconfig.json',
};
const targets = {
node: {
tfjs: {
platform: 'node',
format: 'cjs',
entryPoints: ['src/tfjs/tf-node.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['@tensorflow'],
},
node: {
platform: 'node',
format: 'cjs',
entryPoints: ['src/index.ts'],
outfile: 'dist/face-api.node.js',
external: ['@tensorflow'],
},
},
nodeGPU: {
tfjs: {
platform: 'node',
format: 'cjs',
entryPoints: ['src/tfjs/tf-node-gpu.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['@tensorflow'],
},
node: {
platform: 'node',
format: 'cjs',
entryPoints: ['src/index.ts'],
outfile: 'dist/face-api.node-gpu.js',
external: ['@tensorflow'],
},
},
nodeCPU: {
tfjs: {
platform: 'node',
format: 'cjs',
entryPoints: ['src/tfjs/tf-node-cpu.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['@tensorflow'],
},
node: {
platform: 'node',
format: 'cjs',
entryPoints: ['src/index.ts'],
outfile: 'dist/face-api.node-cpu.js',
external: ['@tensorflow'],
},
},
browserNoBundle: {
tfjs: {
platform: 'browser',
format: 'esm',
entryPoints: ['src/tfjs/tf-browser.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['fs', 'buffer', 'util', 'os', '@tensorflow'],
},
esm: {
platform: 'browser',
format: 'esm',
entryPoints: ['src/index.ts'],
outfile: 'dist/face-api.esm-nobundle.js',
external: ['fs', 'buffer', 'util', 'os', '@tensorflow', 'tfjs.esm.js'],
},
},
browserBundle: {
tfjs: {
platform: 'browser',
format: 'esm',
entryPoints: ['src/tfjs/tf-browser.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['fs', 'buffer', 'util', 'os'],
},
iife: {
platform: 'browser',
format: 'iife',
globalName: 'faceapi',
entryPoints: ['src/index.ts'],
outfile: 'dist/face-api.js',
external: ['fs', 'buffer', 'util', 'os'],
},
esm: {
platform: 'browser',
format: 'esm',
entryPoints: ['src/index.ts'],
outfile: 'dist/face-api.esm.js',
external: ['fs', 'buffer', 'util', 'os'],
},
},
};
async function getStats(json) {
const stats = {};
if (json && json.metafile.inputs && json.metafile.outputs) {
for (const [key, val] of Object.entries(json.metafile.inputs)) {
if (key.startsWith('node_modules')) {
stats.modules = (stats.modules || 0) + 1;
stats.moduleBytes = (stats.moduleBytes || 0) + val.bytes;
} else {
stats.imports = (stats.imports || 0) + 1;
stats.importBytes = (stats.importBytes || 0) + val.bytes;
}
}
const files = [];
for (const [key, val] of Object.entries(json.metafile.outputs)) {
if (!key.endsWith('.map')) {
files.push(key);
stats.outputBytes = (stats.outputBytes || 0) + val.bytes;
}
}
stats.outputFiles = files.join(', ');
}
return stats;
}
function compile(fileNames, options) {
log.info('Compile typings:', fileNames);
const program = ts.createProgram(fileNames, options);
const emit = program.emit();
const diag = ts
.getPreEmitDiagnostics(program)
.concat(emit.diagnostics);
for (const info of diag) {
// @ts-ignore
const msg = info.messageText.messageText || info.messageText;
if (msg.includes('package.json')) continue;
if (msg.includes('Expected 0 arguments, but got 1')) continue;
if (info.file) {
const pos = info.file.getLineAndCharacterOfPosition(info.start || 0);
log.error(`TSC: ${info.file.fileName} [${pos.line + 1},${pos.character + 1}]:`, msg);
} else {
log.error('TSC:', msg);
}
}
}
async function typedoc(entryPoint) {
log.info('Generate TypeDocs:', entryPoint);
if (!td) {
td = new TypeDoc.Application();
td.options.addReader(new TypeDoc.TSConfigReader());
td.bootstrap({ entryPoints: entryPoint });
}
const project = td.convert();
const result = project ? await td.generateDocs(project, 'typedoc') : null;
if (result) log.warn('TypeDoc:', result);
}
// rebuild on file change
async function build(f, msg, dev = false) {
log.info('Build: file', msg, f, 'target:', common.target);
try {
// rebuild all target groups and types
for (const [targetGroupName, targetGroup] of Object.entries(targets)) {
for (const [targetName, targetOptions] of Object.entries(targetGroup)) {
// if triggered from watch mode, rebuild only browser bundle
// if ((require.main !== module) && (targetGroupName !== 'browserBundle')) continue;
// @ts-ignore
const meta = await esbuild.build({ ...common, ...targetOptions });
const stats = await getStats(meta);
log.state(`Build for: ${targetGroupName} type: ${targetName}:`, stats);
}
}
} catch (err) {
// catch errors and print where it occured
log.error('Build error', JSON.stringify(err.errors || err, null, 2));
if (require.main === module) process.exit(1);
}
if (!dev) {
// generate typings & typedoc only when run as explict build
await compile(targets.browserBundle.esm.entryPoints, tsconfig);
await changelog.update('../CHANGELOG.md');
await typedoc(targets.browserBundle.esm.entryPoints);
}
if (require.main === module) process.exit(0);
}
if (require.main === module) {
log.header();
build('all', 'startup');
} else {
exports.build = build;
}

View File

@ -1,58 +0,0 @@
const fs = require('fs');
const path = require('path');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const dayjs = require('dayjs');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const simpleGit = require('simple-git/promise');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const logger = require('@vladmandic/pilogger');
const app = require('../package.json');
const git = simpleGit();
let text = `# ${app.name}
Version: **${app.version}**
Description: **${app.description}**
Author: **${app.author}**
License: **${app.license}** </LICENSE>
Repository: **<${app.repository.url}>**
## Changelog
`;
async function update(f) {
const gitLog = await git.log();
// @ts-ignore
const log = gitLog.all.sort((a, b) => (new Date(b.date).getTime() - new Date(a.date).getTime()));
let previous = '';
const headings = [];
for (const l of log) {
const msg = l.message.toLowerCase();
if ((l.refs !== '') || msg.match(/^[0-99].[0-99].[0-99]/)) {
const dt = dayjs(l.date).format('YYYY/MM/DD');
let ver = msg.match(/[0-99].[0-99].[0-99]/) ? msg : l.refs;
ver = ver.replace('tag: v', '').replace('tag: ', 'release: ').split(',')[0];
const heading = `\n### **${ver}** ${dt} ${l.author_email}\n\n`;
if (!headings.includes(heading) && !ver.startsWith('tag')) {
headings.push(heading);
text += heading;
}
} else if ((msg.length > 2) && !msg.startsWith('update') && (previous !== msg)) {
previous = msg;
text += `- ${msg}\n`;
}
}
const name = path.join(__dirname, f);
fs.writeFileSync(name, text);
logger.info('Update Change log:', [name]);
}
if (require.main === module) {
update('../CHANGELOG.md');
} else {
exports.update = update;
}

View File

@ -1,31 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIFazCCA1OgAwIBAgIUKQKodDBJnuweJs5IcTyL4NIp3vgwDQYJKoZIhvcNAQEL
BQAwRTELMAkGA1UEBhMCVVMxEDAOBgNVBAgMB0Zsb3JpZGExDjAMBgNVBAcMBU1p
YW1pMRQwEgYDVQQKDAtAdmxhZG1hbmRpYzAeFw0yMDExMDcxNTE3NDNaFw0yMTEx
MDcxNTE3NDNaMEUxCzAJBgNVBAYTAlVTMRAwDgYDVQQIDAdGbG9yaWRhMQ4wDAYD
VQQHDAVNaWFtaTEUMBIGA1UECgwLQHZsYWRtYW5kaWMwggIiMA0GCSqGSIb3DQEB
AQUAA4ICDwAwggIKAoICAQDSC88PF8NyLkagK5mAZ/d739SOU16l2Cx3zE35zZQh
O29+1L4L+oMksLYipo+FMgtGO+MSzFsvGgKCs2sDSdfyoNSTZ3QaN4BAZ0sbq+wL
cke7yRBTM/XIGOQfhqq8yC2q8/zXwUbZg0UsCAxDGNwUr0Qlm829laIU/UN1KcYS
57Nebl1z05wMEvYmyl4JBAl9ozne7KS9DyW7jbrAXE8TaEy3+pY66kx5GG6v2+up
ScITGm4YPmPPlpOF1UjQloosgxdVa+fVp8aNCa/rf0JNO0Uhb3OKOZ+4kYmpfPn/
trwoKWAa6CV1uAJ+3zDkLMq1JNlrV4OMp1QvX0wzA47a/n466JMN9SFb0Ng5wf19
VOtT5Zu7chDStBudVjxlMDfUixvhvn4sjbaLNYR1fyWPoNXwr0KX2lpTP1QOzp9/
Sd0iiJ8RPfXn8Xo26MStu4I52CZjS7yEMgJGCLH/mgPuSbrHHYYrrrCPJgmQOZG2
TNMI+EqOwQvHh2ghdv7t7EEk4IslBk0QzufMXQ2WFXQ20nvj74mrmmiMuBcmonpR
0egA5/M18ZPLQxYu0Q86NUr4XHtAG1fq+n8pseQ7Avy6Gk6HRiezCbB7TJ9rnNeu
jie1TDajC6W7rx0VF7hcxkIrDgNgnYcjXUV2hMx1lo4fIoWkL3nJJVEthMVIcJOX
EwIDAQABo1MwUTAdBgNVHQ4EFgQUHawIRAo1bW8Xy7l4oKfM+ESjhs0wHwYDVR0j
BBgwFoAUHawIRAo1bW8Xy7l4oKfM+ESjhs0wDwYDVR0TAQH/BAUwAwEB/zANBgkq
hkiG9w0BAQsFAAOCAgEAozQJk5Ahx7rDn/aMXLdZFxR81VfkmHDm7NhlJsdVKUx5
o/iegXnvwc1PoeKsz2S504QiuL8l7jqZoU2WPIm7Vlr+oxBgiKqjo1EqBsUgNCZ7
qxMD84TVp/KBGjKUh1TXhjJwGGfNNr+R/fJGw+36UeuY3fSckjaYTuNuVElp+DoZ
/pGyu1qpcybLfiR8mpQkCeU/iBq5gIjWddbVjlYoTKfqULZrpsAF2AeqELEgyshl
p3PNhW/54TJSn4mWK+39BibYHPkvx8orEuWKyjjRk82hEXi7J3hsGKX29qC3oO40
67DKDWmZdMCz+E1ERf10V0bSp6iJnnlwknHJloZUETV1NY/DdoSC6e8CN0+0cQqL
aJefJ483O3sXyN3v3+DaEFBLPFgRFGZB7eaBwR2xAv/KfjT5dSyi+wA4LZAxsQMC
Q7UYGNAfHLNHJo/bsj12+JDhJaFZ/KoBKzyMUuEXmvjxXNDMCfm+gVQFoLyXkGq3
491W/O7LjR6pkD+ce0qeTFMu3nfUubyfbONVDEfuH4GC1e+FAggCRaBnFsVzCzXj
jxOOLoQ9nwLk8v17mx0BSwX4iuqvXFntfJbzfcnzQfx/qqPFheIbGnmKw1lrRML8
87ZbN6t01+v2YyYe6Mc7p80s1R3jc8aVX8ca2KcYwsJAkg/xz0q5RJwsE1is5UY=
-----END CERTIFICATE-----

View File

@ -1,52 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQDSC88PF8NyLkag
K5mAZ/d739SOU16l2Cx3zE35zZQhO29+1L4L+oMksLYipo+FMgtGO+MSzFsvGgKC
s2sDSdfyoNSTZ3QaN4BAZ0sbq+wLcke7yRBTM/XIGOQfhqq8yC2q8/zXwUbZg0Us
CAxDGNwUr0Qlm829laIU/UN1KcYS57Nebl1z05wMEvYmyl4JBAl9ozne7KS9DyW7
jbrAXE8TaEy3+pY66kx5GG6v2+upScITGm4YPmPPlpOF1UjQloosgxdVa+fVp8aN
Ca/rf0JNO0Uhb3OKOZ+4kYmpfPn/trwoKWAa6CV1uAJ+3zDkLMq1JNlrV4OMp1Qv
X0wzA47a/n466JMN9SFb0Ng5wf19VOtT5Zu7chDStBudVjxlMDfUixvhvn4sjbaL
NYR1fyWPoNXwr0KX2lpTP1QOzp9/Sd0iiJ8RPfXn8Xo26MStu4I52CZjS7yEMgJG
CLH/mgPuSbrHHYYrrrCPJgmQOZG2TNMI+EqOwQvHh2ghdv7t7EEk4IslBk0QzufM
XQ2WFXQ20nvj74mrmmiMuBcmonpR0egA5/M18ZPLQxYu0Q86NUr4XHtAG1fq+n8p
seQ7Avy6Gk6HRiezCbB7TJ9rnNeujie1TDajC6W7rx0VF7hcxkIrDgNgnYcjXUV2
hMx1lo4fIoWkL3nJJVEthMVIcJOXEwIDAQABAoICAF45S+ZSW6uh1K7PQCnY+a0J
CJncDk5JPhFzhds0fGm39tknaCWJeEECQIIkw6cVfvc/sCpjn9fuTAgDolK0UnoV
6aZCN1P3Z8H8VDYSlm3AEyvLE1avrWbYu6TkzTyoc8wHbXn/yt+SQnpxFccXpMpm
oSRZ0x5jvHS79AHf/mnGpLEMw0FNQOgtrVxTVYGn3PYOPcyhzXi+Dcgn2QmnnxVu
qVOyxqehKTL9YdHjzsB/RN868P5RJocd3gmgVuyzS0KSf+oi4Ln4bFoiaVc0HDL3
DpjkHSl5lgu+xclRNfifKaK+hM0tLHi1VfFB//WrnjdKU3oSpQF4oowprM4Jn5AP
jhRI54JWZlWnvbiAOx7D49xFga3EnqjVH6So2gxi+q3Dv25luXGAnueaBPDpVC6c
nkJm2aCl7T3xlVpW8O5Fs+rsP8Xr9RTyEQJauM01uOi3N2zEeO8ERxTYEW5Sy2U7
OFKRXtLj7Jnejib/SxWGcIX4Wid5QFAygbXz4APfFN22QU0fqmhm4/c2OB/xM8qr
VVFx4xlG2wnuq5CZdZjmK3MTbmSM+pWW8mly/+++p694cf5oXGenYus/JWFNwxj/
fPyA7zQmaTOidu6clDHzkPCOE7TBv9TkQ7lL6ClgE7B39JR65ZQtjCYqRsADKsGI
dFMg+HDmGbVEfWg2V0GBAoIBAQDupImrJ0JXHA/0SEC2Tbz7pE60fRwmBFdhvk4Z
rzZiaOl+M2HXQU6b5DYhKcgdiFah5IuAnsRPo6X5Ug+Q1DV3OFTuEGAkXgqZliNa
aXsJcc0++DYlXX3BrTb66gylVLQRs5tZzsXps5iXWclziDC2go8RKnCwxsxwbzVq
FP4hoBP4dp83WoLF4NznnGFGw3/KLlMivtRxDE5OegpxTuWGlA/bVtT187Ksuuz3
dFUayLfpg0ABS/E7wwAJjSUpPPEi3J/G255H3lZXgS1gWcAf3rGDQYlJKF8UHdja
yWQcAOF+b/bYEpa4lHw+UtKNNkPTiCV4Y7CNQd8a2Gcl7VFTAoIBAQDhUs9r1dhm
rUlNAunVZZZVZ91XhXeqVTa/9xUDEvDh91nB5c7CcuNXxwcX4oTsMF4Bc7CHlvOv
pybp+QLjK310VjxxkFYJT0TKWuYqLjtNkQ93sp8wF3gVCf8m8bMOX/gPfQzNZWKp
un+ZWnzXNU5d2A+63xbZmFzT0Zo6H/h9YEO5Xxw32HCKFzEhl5JD34muZTEXSpdD
p7LUUr5LvnoUqEzonhXx2qRnTLP87d1o0GlkVex9HeeeBgrvm57QYoJnABxw9UFM
/ocLeYsjkmqJQRBDWgiwQlos1pdZyX2Yj20b7Wm5Pxd4aM9gh5EZZMXeQHhbHlWz
UY1IPxfAkytBAoIBAHmYavFDisD58oMlAZwiViXeXaAHk30nfyK1pfPeXBaeoEKG
idb1VsmF6bLSKD4sBwBshExgGWT+3IYCMx43kpqRoGzA+UvugvYpExBxaJiyXMM2
E9jMH1S9HqOQ+CqR00KlwoVrH1rqANk1jbkJbtDAC4fSmSLp2Kd9crj/w1F80FAs
mQnKW5HZ9pUpEEPPP2DUY9XzaCnF/GxuML31VmxRKxc20kIUDzmF8VJQ+0Avf85C
6yz99gfeXzl+qq2teKyrv9nCc47pEhN6JZXPhV53yPk5PmuBX5jPcHxiW1kNddhH
0n3cUuHv/rJ+3vvG555z46vJF9+R7c0u8LfZiTMCggEBAMQd4a/IN0xXM1+2U3SL
sSew+XR+FMPK25aGJmHAkKz9L8CWlzmj6cCy2LevT2aMSqYU3eeGOZ//at1nAV5c
shsaHA30RQ5hUkyWhZLdHnzK752NeQTQyJH3W3+4C9NNMIm6m/QCdLeqPflqSxK9
sPH5ZueN2UOXW+R5oTVKMmxd51RnNhZdasamnPrSBFrTK/EA3pOZNsOKKRqo0jz3
Eyb7vcUSI6OYXFQU7OwO1RGvpKvSJb5Y0wo11DrtRnO16i5gaGDg9u9e8ofISJSz
kcrZOKCGst1HQ1mXhbB+sbSh0aPnJog4I+OHxkgMdvyVO6vQjXExnAIxzzi8wZ25
+oECggEBAIT6q/sn8xFt5Jwc/0Z7YUjd415Nknam09tnbB+UPRR6lt6JFoILx8by
5Y1sN30HWDv27v9G32oZhUDii3Rt3PkbYLqlHy7XBMEXA9WIUo+3Be7mtdL8Wfrj
0zn0b7Hks9a9KsElG1dXUopwjMRL3M22UamaN7e/gl5jz2I7pyc5oaqz9GRDV5yG
slb6gGZ5naMycJD3p8vutXbmgKRr9beRp55UICAbEMdr5p3ks8bfR33Z6t+a97u1
IxI5x5Lb0fdfvL8JK3nRWn7Uzbmm5Ni/OaODNKP+fIm9m2yDAs8LM8RGpPtk6i0d
qIRta3H9KNw2Mhpkm77TtUSV/W5aOmY=
-----END PRIVATE KEY-----

View File

@ -1,162 +0,0 @@
/*
micro http2 server with file monitoring and automatic app rebuild
- can process concurrent http requests
- monitors specified filed and folders for changes
- triggers library and application rebuild
- any build errors are immediately displayed and can be corrected without need for restart
- passthrough data compression
*/
const fs = require('fs');
const zlib = require('zlib');
const http = require('http');
const http2 = require('http2');
const path = require('path');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const chokidar = require('chokidar');
// eslint-disable-next-line node/no-unpublished-require, import/no-extraneous-dependencies
const log = require('@vladmandic/pilogger');
const build = require('./build.js');
// app configuration
// you can provide your server key and certificate or use provided self-signed ones
// self-signed certificate generated using:
// openssl req -x509 -newkey rsa:4096 -nodes -keyout https.key -out https.crt -days 365 -subj "/C=US/ST=Florida/L=Miami/O=@vladmandic"
// client app does not work without secure server since browsers enforce https for webcam access
const options = {
key: fs.readFileSync('server/https.key'),
cert: fs.readFileSync('server/https.crt'),
root: '..',
default: 'demo/index.html',
httpPort: 8000,
httpsPort: 8001,
insecureHTTPParser: false,
minElapsed: 2,
monitor: ['package.json', 'demo', 'src'],
};
// just some predefined mime types
const mime = {
'.html': 'text/html; charset=utf-8',
'.js': 'text/javascript; charset=utf-8',
'.css': 'text/css; charset=utf-8',
'.json': 'application/json; charset=utf-8',
'.png': 'image/png',
'.jpg': 'image/jpeg',
'.gif': 'image/gif',
'.ico': 'image/x-icon',
'.svg': 'image/svg+xml',
'.wav': 'audio/wav',
'.mp4': 'video/mp4',
'.woff': 'font/woff',
'.woff2': 'font/woff2',
'.ttf': 'font/ttf',
'.wasm': 'application/wasm',
};
// checks for multiple events triggering within minElapsed and merges them into a single build
let last = Date.now();

// debounced rebuild trigger: fires a full rebuild unless the previous event
// arrived within options.minElapsed ms, in which case the event is merged/logged
async function buildAll(evt, msg) {
  const now = Date.now();
  const elapsed = now - last;
  last = now;
  if (elapsed > options.minElapsed) {
    build.build(evt, msg, true);
  } else {
    log.state('Build: merge event file', msg, evt);
  }
}
// watch filesystem for any changes and notify build when needed
async function watch() {
  const watcherOptions = {
    persistent: true,
    ignorePermissionErrors: false,
    alwaysStat: false,
    ignoreInitial: true, // do not fire events for files already present at startup
    followSymlinks: true,
    usePolling: false,
    useFsEvents: false,
    atomic: true,
  };
  const watcher = chokidar.watch(options.monitor, watcherOptions);
  // route every add/change/delete event through the shared debounced build handler
  watcher.on('add', (evt) => buildAll(evt, 'add'));
  watcher.on('change', (evt) => buildAll(evt, 'modify'));
  watcher.on('unlink', (evt) => buildAll(evt, 'remove'));
  watcher.on('error', (err) => log.error(`Client watcher error: ${err}`));
  watcher.on('ready', () => log.state('Monitoring:', options.monitor));
}
// get file content metadata for a valid url request
// resolves with { ok, file, stat } for a readable file, or null when the path does not exist;
// a directory is resolved by recursing into its configured default document (options.default)
async function handle(url) {
  let obj = { ok: false, file: url };
  // bail out early for missing paths; the original resolve(null) did not return,
  // so statSync could run on a nonexistent path (its throw was silently swallowed
  // because the promise had already resolved)
  if (!fs.existsSync(obj.file)) return null;
  obj.stat = fs.statSync(obj.file);
  if (obj.stat.isFile()) obj.ok = true;
  if (!obj.ok && obj.stat.isDirectory()) {
    // descend into the directory by serving its default document;
    // await replaces the old resolve-a-promise hack that needed a @ts-ignore
    obj = await handle(path.join(obj.file, options.default));
  }
  return obj;
}
// process http requests: resolve the url to a file under options.root and stream it back,
// applying brotli compression when the client advertises support via accept-encoding
async function httpRequest(req, res) {
  handle(path.join(__dirname, options.root, decodeURI(req.url)))
    .then((result) => {
      // get original ip of requestor, regardless if it's behind proxy or not
      // eslint-disable-next-line dot-notation
      const forwarded = (req.headers['forwarded'] || '').match(/for="\[(.*)\]:/);
      const ip = (Array.isArray(forwarded) ? forwarded[1] : null) || req.headers['x-forwarded-for'] || req.ip || req.socket.remoteAddress;
      if (!result || !result.ok) {
        res.writeHead(404, { 'Content-Type': 'text/html' });
        res.end('Error 404: Not Found\n', 'utf-8');
        log.warn(`${req.method}/${req.httpVersion}`, res.statusCode, req.url, ip);
        return null;
      }
      const ext = String(path.extname(result.file)).toLowerCase();
      const contentType = mime[ext] || 'application/octet-stream';
      const accept = req.headers['accept-encoding'] ? req.headers['accept-encoding'].includes('br') : false; // does target accept brotli compressed data
      const headers = {
        // 'Content-Length': result.stat.size, // not using as it's misleading for compressed streams
        'Content-Language': 'en',
        'Content-Type': contentType,
        'Last-Modified': result.stat.mtime,
        'Cache-Control': 'no-cache',
        'X-Content-Type-Options': 'nosniff',
      };
      // only declare an encoding when one is actually applied;
      // previously an empty 'Content-Encoding': '' header was sent for uncompressed responses
      if (accept) headers['Content-Encoding'] = 'br';
      res.writeHead(200, headers);
      const stream = fs.createReadStream(result.file);
      if (accept) {
        // brotli quality 5: reasonable speed/size tradeoff for on-the-fly compression
        const compress = zlib.createBrotliCompress({ params: { [zlib.constants.BROTLI_PARAM_QUALITY]: 5 } });
        stream.pipe(compress).pipe(res);
      } else {
        stream.pipe(res); // send raw data without compression
      }
      log.data(`${req.method}/${req.httpVersion}`, res.statusCode, contentType, result.stat.size, req.url, ip);
      return null;
    })
    .catch((err) => log.error('handle error:', err));
}
// app main entry point: start file monitoring, bring up the http/http2 servers
// (each only when its port is configured), then run the initial full build
async function main() {
  log.header();
  await watch();
  const wantHttp = options.httpPort && options.httpPort > 0;
  const wantHttps = options.httpsPort && options.httpsPort > 0;
  if (wantHttp) {
    const insecureServer = http.createServer(options, httpRequest);
    insecureServer.on('listening', () => log.state('HTTP server listening:', options.httpPort));
    insecureServer.listen(options.httpPort);
  }
  if (wantHttps) {
    const secureServer = http2.createSecureServer(options, httpRequest);
    secureServer.on('listening', () => log.state('HTTP2 server listening:', options.httpsPort));
    secureServer.listen(options.httpsPort);
  }
  await build.build('all', 'startup', true);
}
main();

View File

@ -10,9 +10,9 @@ export abstract class NeuralNetwork<TNetParams> {
this._name = name; this._name = name;
} }
protected _params: TNetParams | undefined = undefined protected _params: TNetParams | undefined = undefined;
protected _paramMappings: ParamMapping[] = [] protected _paramMappings: ParamMapping[] = [];
public _name: any; public _name: any;
@ -62,7 +62,7 @@ export abstract class NeuralNetwork<TNetParams> {
}); });
} }
public dispose(throwOnRedispose: boolean = true) { public dispose(throwOnRedispose = true) {
this.getParamList().forEach((param) => { this.getParamList().forEach((param) => {
if (throwOnRedispose && param.tensor.isDisposed) { if (throwOnRedispose && param.tensor.isDisposed) {
throw new Error(`param tensor has already been disposed for path ${param.path}`); throw new Error(`param tensor has already been disposed for path ${param.path}`);
@ -102,8 +102,9 @@ export abstract class NeuralNetwork<TNetParams> {
} }
const { readFile } = env.getEnv(); const { readFile } = env.getEnv();
const { manifestUri, modelBaseUri } = getModelUris(filePath, this.getDefaultModelName()); const { manifestUri, modelBaseUri } = getModelUris(filePath, this.getDefaultModelName());
const fetchWeightsFromDisk = (filePaths: string[]) => Promise.all(filePaths.map((fp) => readFile(fp).then((buf) => buf.buffer))); const fetchWeightsFromDisk = (filePaths: string[]) => Promise.all(filePaths.map((fp) => readFile(fp).then((buf) => (typeof buf === 'string' ? Buffer.from(buf) : buf.buffer))));
const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk); // @ts-ignore async-vs-sync mismatch
const loadWeights = tf['io'].weightsLoaderFactory(fetchWeightsFromDisk);
const manifest = JSON.parse((await readFile(manifestUri)).toString()); const manifest = JSON.parse((await readFile(manifestUri)).toString());
const weightMap = await loadWeights(manifest, modelBaseUri); const weightMap = await loadWeights(manifest, modelBaseUri);
this.loadFromWeightMap(weightMap); this.loadFromWeightMap(weightMap);

View File

@ -1,6 +1,10 @@
export class PlatformBrowser { export class PlatformBrowser {
private textEncoder: TextEncoder; private textEncoder: TextEncoder;
constructor() {
this.textEncoder = new TextEncoder();
}
fetch(path: string, init?: any): Promise<Response> { fetch(path: string, init?: any): Promise<Response> {
return fetch(path, init); return fetch(path, init);
} }
@ -13,9 +17,6 @@ export class PlatformBrowser {
if (encoding !== 'utf-8' && encoding !== 'utf8') { if (encoding !== 'utf-8' && encoding !== 'utf8') {
throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`); throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`);
} }
if (this.textEncoder == null) {
this.textEncoder = new TextEncoder();
}
return this.textEncoder.encode(text); return this.textEncoder.encode(text);
} }

View File

@ -1,5 +1,4 @@
import * as tf from '../../dist/tfjs.esm'; import * as tf from '../../dist/tfjs.esm.js';
import { fullyConnectedLayer } from '../common/fullyConnectedLayer'; import { fullyConnectedLayer } from '../common/fullyConnectedLayer';
import { seperateWeightMaps } from '../faceProcessor/util'; import { seperateWeightMaps } from '../faceProcessor/util';
import { TinyXception } from '../xception/TinyXception'; import { TinyXception } from '../xception/TinyXception';
@ -10,7 +9,7 @@ import { NeuralNetwork } from '../NeuralNetwork';
import { NetInput, TNetInput, toNetInput } from '../dom/index'; import { NetInput, TNetInput, toNetInput } from '../dom/index';
export class AgeGenderNet extends NeuralNetwork<NetParams> { export class AgeGenderNet extends NeuralNetwork<NetParams> {
private _faceFeatureExtractor: TinyXception private _faceFeatureExtractor: TinyXception;
constructor(faceFeatureExtractor: TinyXception = new TinyXception(2)) { constructor(faceFeatureExtractor: TinyXception = new TinyXception(2)) {
super('AgeGenderNet'); super('AgeGenderNet');
@ -85,7 +84,7 @@ export class AgeGenderNet extends NeuralNetwork<NetParams> {
return 'age_gender_model'; return 'age_gender_model';
} }
public dispose(throwOnRedispose: boolean = true) { public override dispose(throwOnRedispose = true) {
this.faceFeatureExtractor.dispose(throwOnRedispose); this.faceFeatureExtractor.dispose(throwOnRedispose);
super.dispose(throwOnRedispose); super.dispose(throwOnRedispose);
} }

View File

@ -7,10 +7,8 @@ export interface IBoundingBox {
bottom: number bottom: number
} }
export class BoundingBox extends Box<BoundingBox> implements IBoundingBox { export class BoundingBox extends Box implements IBoundingBox {
constructor(left: number, top: number, right: number, bottom: number, allowNegativeDimensions: boolean = false) { constructor(left: number, top: number, right: number, bottom: number, allowNegativeDimensions = false) {
super({ super({ left, top, right, bottom }, allowNegativeDimensions);
left, top, right, bottom,
}, allowNegativeDimensions);
} }
} }

View File

@ -9,7 +9,7 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
return !!rect && [rect.x, rect.y, rect.width, rect.height].every(isValidNumber); return !!rect && [rect.x, rect.y, rect.width, rect.height].every(isValidNumber);
} }
public static assertIsValidBox(box: any, callee: string, allowNegativeDimensions: boolean = false) { public static assertIsValidBox(box: any, callee: string, allowNegativeDimensions = false) {
if (!Box.isRect(box)) { if (!Box.isRect(box)) {
throw new Error(`${callee} - invalid box: ${JSON.stringify(box)}, expected object with properties x, y, width, height`); throw new Error(`${callee} - invalid box: ${JSON.stringify(box)}, expected object with properties x, y, width, height`);
} }
@ -19,15 +19,15 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
} }
} }
private _x: number private _x: number;
private _y: number private _y: number;
private _width: number private _width: number;
private _height: number private _height: number;
constructor(_box: IBoundingBox | IRect, allowNegativeDimensions: boolean = true) { constructor(_box: IBoundingBox | IRect, allowNegativeDimensions = true) {
const box = (_box || {}) as any; const box = (_box || {}) as any;
const isBbox = [box.left, box.top, box.right, box.bottom].every(isValidNumber); const isBbox = [box.left, box.top, box.right, box.bottom].every(isValidNumber);
@ -128,9 +128,7 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
this.width + padX, this.width + padX,
this.height + padY, this.height + padY,
]; ];
return new Box({ return new Box({ x, y, width, height });
x, y, width, height,
});
} }
public clipAtImageBorders(imgWidth: number, imgHeight: number): Box<BoxType> { public clipAtImageBorders(imgWidth: number, imgHeight: number): Box<BoxType> {
@ -143,9 +141,7 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
const clippedWidth = Math.min(newWidth, imgWidth - clippedX); const clippedWidth = Math.min(newWidth, imgWidth - clippedX);
const clippedHeight = Math.min(newHeight, imgHeight - clippedY); const clippedHeight = Math.min(newHeight, imgHeight - clippedY);
return (new Box({ return (new Box({ x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight })).floor();
x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight,
})).floor();
} }
public shift(sx: number, sy: number): Box<BoxType> { public shift(sx: number, sy: number): Box<BoxType> {
@ -153,9 +149,7 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
const x = this.x + sx; const x = this.x + sx;
const y = this.y + sy; const y = this.y + sy;
return new Box({ return new Box({ x, y, width, height });
x, y, width, height,
});
} }
public padAtBorders(imageHeight: number, imageWidth: number) { public padAtBorders(imageHeight: number, imageWidth: number) {
@ -189,9 +183,7 @@ export class Box<BoxType = any> implements IBoundingBox, IRect {
y = 1; y = 1;
} }
return { return { dy, edy, dx, edx, y, ey, x, ex, w, h };
dy, edy, dx, edx, y, ey, x, ex, w, h,
};
} }
public calibrate(region: Box) { public calibrate(region: Box) {

View File

@ -6,9 +6,9 @@ export interface IDimensions {
} }
export class Dimensions implements IDimensions { export class Dimensions implements IDimensions {
private _width: number private _width: number;
private _height: number private _height: number;
constructor(width: number, height: number) { constructor(width: number, height: number) {
if (!isValidNumber(width) || !isValidNumber(height)) { if (!isValidNumber(width) || !isValidNumber(height)) {

View File

@ -17,7 +17,7 @@ export class FaceDetection extends ObjectDetection implements IFaceDetecion {
super(score, score, '', relativeBox, imageDims); super(score, score, '', relativeBox, imageDims);
} }
public forSize(width: number, height: number): FaceDetection { public override forSize(width: number, height: number): FaceDetection {
const { score, relativeBox, imageDims } = super.forSize(width, height); const { score, relativeBox, imageDims } = super.forSize(width, height);
return new FaceDetection(score, relativeBox, imageDims); return new FaceDetection(score, relativeBox, imageDims);
} }

View File

@ -18,11 +18,11 @@ export interface IFaceLandmarks {
} }
export class FaceLandmarks implements IFaceLandmarks { export class FaceLandmarks implements IFaceLandmarks {
protected _shift: Point protected _shift: Point;
protected _positions: Point[] protected _positions: Point[];
protected _imgDims: Dimensions protected _imgDims: Dimensions;
constructor( constructor(
relativeFaceLandmarkPositions: Point[], relativeFaceLandmarkPositions: Point[],

View File

@ -3,7 +3,7 @@ import { FaceLandmarks } from './FaceLandmarks';
import { Point } from './Point'; import { Point } from './Point';
export class FaceLandmarks5 extends FaceLandmarks { export class FaceLandmarks5 extends FaceLandmarks {
protected getRefPointsForAlignment(): Point[] { protected override getRefPointsForAlignment(): Point[] {
const pts = this.positions; const pts = this.positions;
return [ return [
pts[0], pts[0],

View File

@ -31,7 +31,7 @@ export class FaceLandmarks68 extends FaceLandmarks {
return this.positions.slice(48, 68); return this.positions.slice(48, 68);
} }
protected getRefPointsForAlignment(): Point[] { protected override getRefPointsForAlignment(): Point[] {
return [ return [
this.getLeftEye(), this.getLeftEye(),
this.getRightEye(), this.getRightEye(),

View File

@ -6,9 +6,8 @@ export interface IFaceMatch {
} }
export class FaceMatch implements IFaceMatch { export class FaceMatch implements IFaceMatch {
private _label: string private _label: string;
private _distance: number;
private _distance: number
constructor(label: string, distance: number) { constructor(label: string, distance: number) {
this._label = label; this._label = label;
@ -19,7 +18,7 @@ export class FaceMatch implements IFaceMatch {
public get distance(): number { return this._distance; } public get distance(): number { return this._distance; }
public toString(withDistance: boolean = true): string { public toString(withDistance = true): string {
return `${this.label}${withDistance ? ` (${round(this.distance)})` : ''}`; return `${this.label}${withDistance ? ` (${round(this.distance)})` : ''}`;
} }
} }

View File

@ -3,16 +3,15 @@ import { IBoundingBox } from './BoundingBox';
import { Box } from './Box'; import { Box } from './Box';
import { IRect } from './Rect'; import { IRect } from './Rect';
export class LabeledBox extends Box<LabeledBox> { export class LabeledBox extends Box {
public static assertIsValidLabeledBox(box: any, callee: string) { public static assertIsValidLabeledBox(box: any, callee: string) {
Box.assertIsValidBox(box, callee); Box.assertIsValidBox(box, callee);
if (!isValidNumber(box.label)) { if (!isValidNumber(box.label)) {
throw new Error(`${callee} - expected property label (${box.label}) to be a number`); throw new Error(`${callee} - expected property label (${box.label}) to be a number`);
} }
} }
private _label: number private _label: number;
constructor(box: IBoundingBox | IRect | any, label: number) { constructor(box: IBoundingBox | IRect | any, label: number) {
super(box); super(box);

View File

@ -1,7 +1,7 @@
export class LabeledFaceDescriptors { export class LabeledFaceDescriptors {
private _label: string private _label: string;
private _descriptors: Float32Array[] private _descriptors: Float32Array[];
constructor(label: string, descriptors: Float32Array[]) { constructor(label: string, descriptors: Float32Array[]) {
if (!(typeof label === 'string')) { if (!(typeof label === 'string')) {

View File

@ -3,15 +3,15 @@ import { Dimensions, IDimensions } from './Dimensions';
import { IRect, Rect } from './Rect'; import { IRect, Rect } from './Rect';
export class ObjectDetection { export class ObjectDetection {
private _score: number private _score: number;
private _classScore: number private _classScore: number;
private _className: string private _className: string;
private _box: Rect private _box: Rect;
private _imageDims: Dimensions private _imageDims: Dimensions;
constructor( constructor(
score: number, score: number,

View File

@ -4,9 +4,9 @@ export interface IPoint {
} }
export class Point implements IPoint { export class Point implements IPoint {
private _x: number private _x: number;
private _y: number private _y: number;
constructor(x: number, y: number) { constructor(x: number, y: number) {
this._x = x; this._x = x;

View File

@ -15,9 +15,9 @@ export class PredictedBox extends LabeledBox {
} }
} }
private _score: number private _score: number;
private _classScore: number private _classScore: number;
constructor(box: IBoundingBox | IRect | any, label: number, score: number, classScore: number) { constructor(box: IBoundingBox | IRect | any, label: number, score: number, classScore: number) {
super(box, label); super(box, label);

View File

@ -7,10 +7,8 @@ export interface IRect {
height: number height: number
} }
export class Rect extends Box<Rect> implements IRect { export class Rect extends Box implements IRect {
constructor(x: number, y: number, width: number, height: number, allowNegativeDimensions: boolean = false) { constructor(x: number, y: number, width: number, height: number, allowNegativeDimensions = false) {
super({ super({ x, y, width, height }, allowNegativeDimensions);
x, y, width, height,
}, allowNegativeDimensions);
} }
} }

View File

@ -6,7 +6,7 @@ export function convLayer(
x: tf.Tensor4D, x: tf.Tensor4D,
params: ConvParams, params: ConvParams,
padding: 'valid' | 'same' = 'same', padding: 'valid' | 'same' = 'same',
withRelu: boolean = false, withRelu = false,
): tf.Tensor4D { ): tf.Tensor4D {
return tf.tidy(() => { return tf.tidy(() => {
const out = tf.add( const out = tf.add(

View File

@ -9,19 +9,19 @@ import { imageToSquare } from './imageToSquare';
import { TResolvedNetInput } from './types'; import { TResolvedNetInput } from './types';
export class NetInput { export class NetInput {
private _imageTensors: Array<tf.Tensor3D | tf.Tensor4D> = [] private _imageTensors: Array<tf.Tensor3D | tf.Tensor4D> = [];
private _canvases: HTMLCanvasElement[] = [] private _canvases: HTMLCanvasElement[] = [];
private _batchSize: number private _batchSize: number;
private _treatAsBatchInput: boolean = false private _treatAsBatchInput = false;
private _inputDimensions: number[][] = [] private _inputDimensions: number[][] = [];
private _inputSize: number private _inputSize = 0;
constructor(inputs: Array<TResolvedNetInput>, treatAsBatchInput: boolean = false) { constructor(inputs: Array<TResolvedNetInput>, treatAsBatchInput = false) {
if (!Array.isArray(inputs)) { if (!Array.isArray(inputs)) {
throw new Error(`NetInput.constructor - expected inputs to be an Array of TResolvedNetInput or to be instanceof tf.Tensor4D, instead have ${inputs}`); throw new Error(`NetInput.constructor - expected inputs to be an Array of TResolvedNetInput or to be instanceof tf.Tensor4D, instead have ${inputs}`);
} }
@ -47,8 +47,9 @@ export class NetInput {
return; return;
} }
// @ts-ignore
const canvas = (input as any) instanceof env.getEnv().Canvas ? input : createCanvasFromMedia(input); const canvas = (input as any) instanceof env.getEnv().Canvas ? input : createCanvasFromMedia(input);
this._canvases[idx] = canvas; this._canvases[idx] = canvas as HTMLCanvasElement;
this._inputDimensions[idx] = [canvas.height, canvas.width, 3]; this._inputDimensions[idx] = [canvas.height, canvas.width, 3];
}); });
} }
@ -118,7 +119,7 @@ export class NetInput {
* both sides of the minor dimension oof the image. * both sides of the minor dimension oof the image.
* @returns The batch tensor. * @returns The batch tensor.
*/ */
public toBatchTensor(inputSize: number, isCenterInputs: boolean = true): tf.Tensor4D { public toBatchTensor(inputSize: number, isCenterInputs = true): tf.Tensor4D {
this._inputSize = inputSize; this._inputSize = inputSize;
return tf.tidy(() => { return tf.tidy(() => {
@ -127,23 +128,24 @@ export class NetInput {
if (input instanceof tf.Tensor) { if (input instanceof tf.Tensor) {
let imgTensor = isTensor4D(input) ? input : tf.expandDims(input); let imgTensor = isTensor4D(input) ? input : tf.expandDims(input);
imgTensor = padToSquare(imgTensor, isCenterInputs); imgTensor = padToSquare(imgTensor as tf.Tensor4D, isCenterInputs);
if (imgTensor.shape[1] !== inputSize || imgTensor.shape[2] !== inputSize) { if (imgTensor.shape[1] !== inputSize || imgTensor.shape[2] !== inputSize) {
imgTensor = tf.image.resizeBilinear(imgTensor, [inputSize, inputSize], false, false); imgTensor = tf['image'].resizeBilinear(imgTensor as tf.Tensor4D, [inputSize, inputSize], false, false);
} }
return imgTensor.as3D(inputSize, inputSize, 3); return imgTensor.as3D(inputSize, inputSize, 3);
} }
if (input instanceof env.getEnv().Canvas) { if (input instanceof env.getEnv().Canvas) {
return tf.browser.fromPixels(imageToSquare(input, inputSize, isCenterInputs)); return tf['browser'].fromPixels(imageToSquare(input, inputSize, isCenterInputs));
} }
throw new Error(`toBatchTensor - at batchIdx ${batchIdx}, expected input to be instanceof tf.Tensor or instanceof HTMLCanvasElement, instead have ${input}`); throw new Error(`toBatchTensor - at batchIdx ${batchIdx}, expected input to be instanceof tf.Tensor or instanceof HTMLCanvasElement, instead have ${input}`);
}); });
const batchTensor = tf.stack(inputTensors.map((t) => tf.cast(t, 'float32'))).as4D(this.batchSize, inputSize, inputSize, 3); const batchTensor = tf.stack(inputTensors.map((t) => tf.cast(t, 'float32'))).as4D(this.batchSize, inputSize, inputSize, 3);
// const batchTensor = tf.stack(inputTensors.map((t) => tf.cast(t, 'float32'))) as tf.Tensor4D;
return batchTensor; return batchTensor;
}); });

View File

@ -4,7 +4,10 @@ import { isMediaLoaded } from './isMediaLoaded';
export function awaitMediaLoaded(media: HTMLImageElement | HTMLVideoElement | HTMLCanvasElement) { export function awaitMediaLoaded(media: HTMLImageElement | HTMLVideoElement | HTMLCanvasElement) {
// eslint-disable-next-line consistent-return // eslint-disable-next-line consistent-return
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
if (media instanceof env.getEnv().Canvas || isMediaLoaded(media)) return resolve(null); if (media instanceof env.getEnv().Canvas || isMediaLoaded(media)) {
resolve(null);
return;
}
function onError(e: Event) { function onError(e: Event) {
if (!e.currentTarget) return; if (!e.currentTarget) return;

15
src/dom/bufferToVideo.ts Normal file
View File

@ -0,0 +1,15 @@
import { env } from '../env/index';
export function bufferToVideo(buf: Blob): Promise<HTMLVideoElement> {
return new Promise((resolve, reject) => {
if (!(buf instanceof Blob)) reject(new Error('bufferToVideo - expected buf to be of type: Blob'));
const video = env.getEnv().createVideoElement();
video.oncanplay = () => resolve(video);
video.onerror = reject;
video.playsInline = true;
video.muted = true;
video.src = URL.createObjectURL(buf);
video.play();
});
}

View File

@ -25,17 +25,11 @@ export async function extractFaceTensors(imageTensor: tf.Tensor3D | tf.Tensor4D,
return tf.tidy(() => { return tf.tidy(() => {
const [imgHeight, imgWidth, numChannels] = imageTensor.shape.slice(isTensor4D(imageTensor) ? 1 : 0); const [imgHeight, imgWidth, numChannels] = imageTensor.shape.slice(isTensor4D(imageTensor) ? 1 : 0);
const boxes = detections.map((det) => (det instanceof FaceDetection ? det.forSize(imgWidth, imgHeight).box : det))
const boxes = detections
.map((det) => (det instanceof FaceDetection
? det.forSize(imgWidth, imgHeight).box
: det))
.map((box) => box.clipAtImageBorders(imgWidth, imgHeight)); .map((box) => box.clipAtImageBorders(imgWidth, imgHeight));
const faceTensors = boxes
const faceTensors = boxes.map(({ .filter((box) => box.width > 0 && box.height > 0)
x, y, width, height, .map(({ x, y, width, height }) => tf.slice3d(imageTensor.as3D(imgHeight, imgWidth, numChannels), [y, x, 0], [height, width, numChannels]));
}) => tf.slice3d(imageTensor.as3D(imgHeight, imgWidth, numChannels), [y, x, 0], [height, width, numChannels]));
return faceTensors; return faceTensors;
}); });
} }

12
src/dom/fetchVideo.ts Normal file
View File

@ -0,0 +1,12 @@
import { bufferToVideo } from './bufferToVideo';
import { fetchOrThrow } from './fetchOrThrow';
export async function fetchVideo(uri: string): Promise<HTMLVideoElement> {
const res = await fetchOrThrow(uri);
const blob = await (res).blob();
if (!blob.type.startsWith('video/')) {
throw new Error(`fetchVideo - expected blob type to be of type video/*, instead have: ${blob.type}, for url: ${res.url}`);
}
return bufferToVideo(blob);
}

View File

@ -3,21 +3,10 @@ import { resolveInput } from './resolveInput';
export function getContext2dOrThrow(canvasArg: string | HTMLCanvasElement | CanvasRenderingContext2D): CanvasRenderingContext2D { export function getContext2dOrThrow(canvasArg: string | HTMLCanvasElement | CanvasRenderingContext2D): CanvasRenderingContext2D {
const { Canvas, CanvasRenderingContext2D } = env.getEnv(); const { Canvas, CanvasRenderingContext2D } = env.getEnv();
if (canvasArg instanceof CanvasRenderingContext2D) return canvasArg;
if (canvasArg instanceof CanvasRenderingContext2D) {
return canvasArg;
}
const canvas = resolveInput(canvasArg); const canvas = resolveInput(canvasArg);
if (!(canvas instanceof Canvas)) throw new Error('resolveContext2d - expected canvas to be of instance of Canvas');
if (!(canvas instanceof Canvas)) { const ctx = canvas.getContext('2d', { willReadFrequently: true });
throw new Error('resolveContext2d - expected canvas to be of instance of Canvas'); if (!ctx) throw new Error('resolveContext2d - canvas 2d context is null');
}
const ctx = canvas.getContext('2d');
if (!ctx) {
throw new Error('resolveContext2d - canvas 2d context is null');
}
return ctx; return ctx;
} }

View File

@ -11,7 +11,7 @@ export async function imageTensorToCanvas(
const [height, width, numChannels] = imgTensor.shape.slice(isTensor4D(imgTensor) ? 1 : 0); const [height, width, numChannels] = imgTensor.shape.slice(isTensor4D(imgTensor) ? 1 : 0);
const imgTensor3D = tf.tidy(() => imgTensor.as3D(height, width, numChannels).toInt()); const imgTensor3D = tf.tidy(() => imgTensor.as3D(height, width, numChannels).toInt());
await tf.browser.toPixels(imgTensor3D, targetCanvas); await tf['browser'].toPixels(imgTensor3D, targetCanvas);
imgTensor3D.dispose(); imgTensor3D.dispose();

View File

@ -3,7 +3,7 @@ import { createCanvas, createCanvasFromMedia } from './createCanvas';
import { getContext2dOrThrow } from './getContext2dOrThrow'; import { getContext2dOrThrow } from './getContext2dOrThrow';
import { getMediaDimensions } from './getMediaDimensions'; import { getMediaDimensions } from './getMediaDimensions';
export function imageToSquare(input: HTMLImageElement | HTMLCanvasElement, inputSize: number, centerImage: boolean = false) { export function imageToSquare(input: HTMLImageElement | HTMLCanvasElement, inputSize: number, centerImage = false) {
const { Image, Canvas } = env.getEnv(); const { Image, Canvas } = env.getEnv();
if (!(input instanceof Image || input instanceof Canvas)) { if (!(input instanceof Image || input instanceof Canvas)) {

View File

@ -7,6 +7,7 @@ export * from './fetchImage';
export * from './fetchJson'; export * from './fetchJson';
export * from './fetchNetWeights'; export * from './fetchNetWeights';
export * from './fetchOrThrow'; export * from './fetchOrThrow';
export * from './fetchVideo';
export * from './getContext2dOrThrow'; export * from './getContext2dOrThrow';
export * from './getMediaDimensions'; export * from './getMediaDimensions';
export * from './imageTensorToCanvas'; export * from './imageTensorToCanvas';

View File

@ -8,7 +8,8 @@ export async function loadWeightMap(
defaultModelName: string, defaultModelName: string,
): Promise<tf.NamedTensorMap> { ): Promise<tf.NamedTensorMap> {
const { manifestUri, modelBaseUri } = getModelUris(uri, defaultModelName); const { manifestUri, modelBaseUri } = getModelUris(uri, defaultModelName);
// @ts-ignore
const manifest = await fetchJson<tf.io.WeightsManifestConfig>(manifestUri); const manifest = await fetchJson<tf.io.WeightsManifestConfig>(manifestUri);
// if (manifest['weightsManifest']) manifest = manifest['weightsManifest']; // if (manifest['weightsManifest']) manifest = manifest['weightsManifest'];
return tf.io.loadWeights(manifest, modelBaseUri); return tf['io'].loadWeights(manifest, modelBaseUri);
} }

View File

@ -1,7 +1,7 @@
import { IDimensions } from '../classes/index'; import { IDimensions } from '../classes/index';
import { getMediaDimensions } from './getMediaDimensions'; import { getMediaDimensions } from './getMediaDimensions';
export function matchDimensions(input: IDimensions, reference: IDimensions, useMediaDimensions: boolean = false) { export function matchDimensions(input: IDimensions, reference: IDimensions, useMediaDimensions = false) {
const { width, height } = useMediaDimensions const { width, height } = useMediaDimensions
? getMediaDimensions(reference) ? getMediaDimensions(reference)
: reference; : reference;

View File

@ -1,11 +1,9 @@
import * as tf from '../../dist/tfjs.esm'; import type { Tensor3D, Tensor4D } from '../../dist/tfjs.esm';
import { NetInput } from './NetInput'; import { NetInput } from './NetInput';
export type TMediaElement = HTMLImageElement | HTMLVideoElement | HTMLCanvasElement export type TMediaElement = HTMLImageElement | HTMLVideoElement | HTMLCanvasElement
export type TResolvedNetInput = TMediaElement | tf.Tensor3D | tf.Tensor4D export type TResolvedNetInput = TMediaElement | Tensor3D | Tensor4D
export type TNetInputArg = string | TResolvedNetInput export type TNetInput = string | TResolvedNetInput | Array<string | TResolvedNetInput> | NetInput
export type TNetInput = TNetInputArg | Array<TNetInputArg> | NetInput | tf.Tensor4D

View File

@ -11,13 +11,13 @@ export interface IDrawBoxOptions {
} }
export class DrawBoxOptions { export class DrawBoxOptions {
public boxColor: string public boxColor: string;
public lineWidth: number public lineWidth: number;
public drawLabelOptions: DrawTextFieldOptions public drawLabelOptions: DrawTextFieldOptions;
public label?: string public label?: string;
constructor(options: IDrawBoxOptions = {}) { constructor(options: IDrawBoxOptions = {}) {
const { const {
@ -36,9 +36,9 @@ export class DrawBoxOptions {
} }
export class DrawBox { export class DrawBox {
public box: Box public box: Box;
public options: DrawBoxOptions public options: DrawBoxOptions;
constructor( constructor(
box: IBoundingBox | IRect, box: IBoundingBox | IRect,

View File

@ -17,17 +17,17 @@ export interface IDrawFaceLandmarksOptions {
} }
export class DrawFaceLandmarksOptions { export class DrawFaceLandmarksOptions {
public drawLines: boolean public drawLines: boolean;
public drawPoints: boolean public drawPoints: boolean;
public lineWidth: number public lineWidth: number;
public pointSize: number public pointSize: number;
public lineColor: string public lineColor: string;
public pointColor: string public pointColor: string;
constructor(options: IDrawFaceLandmarksOptions = {}) { constructor(options: IDrawFaceLandmarksOptions = {}) {
const { const {
@ -43,9 +43,9 @@ export class DrawFaceLandmarksOptions {
} }
export class DrawFaceLandmarks { export class DrawFaceLandmarks {
public faceLandmarks: FaceLandmarks public faceLandmarks: FaceLandmarks;
public options: DrawFaceLandmarksOptions public options: DrawFaceLandmarksOptions;
constructor( constructor(
faceLandmarks: FaceLandmarks, faceLandmarks: FaceLandmarks,

View File

@ -25,17 +25,17 @@ export interface IDrawTextFieldOptions {
} }
export class DrawTextFieldOptions implements IDrawTextFieldOptions { export class DrawTextFieldOptions implements IDrawTextFieldOptions {
public anchorPosition: AnchorPosition public anchorPosition: AnchorPosition;
public backgroundColor: string public backgroundColor: string;
public fontColor: string public fontColor: string;
public fontSize: number public fontSize: number;
public fontStyle: string public fontStyle: string;
public padding: number public padding: number;
constructor(options: IDrawTextFieldOptions = {}) { constructor(options: IDrawTextFieldOptions = {}) {
const { const {
@ -51,11 +51,11 @@ export class DrawTextFieldOptions implements IDrawTextFieldOptions {
} }
export class DrawTextField { export class DrawTextField {
public text: string[] public text: string[];
public anchor : IPoint public anchor : IPoint;
public options: DrawTextFieldOptions public options: DrawTextFieldOptions;
constructor( constructor(
text: string | string[] | DrawTextField, text: string | string[] | DrawTextField,

View File

@ -3,7 +3,7 @@ import { Point } from '../classes/index';
export function drawContour( export function drawContour(
ctx: CanvasRenderingContext2D, ctx: CanvasRenderingContext2D,
points: Point[], points: Point[],
isClosed: boolean = false, isClosed = false,
) { ) {
ctx.beginPath(); ctx.beginPath();

View File

@ -7,12 +7,7 @@ import { DrawTextField } from './DrawTextField';
export type DrawFaceExpressionsInput = FaceExpressions | WithFaceExpressions<{}> export type DrawFaceExpressionsInput = FaceExpressions | WithFaceExpressions<{}>
export function drawFaceExpressions( export function drawFaceExpressions(canvasArg: string | HTMLCanvasElement, faceExpressions: DrawFaceExpressionsInput | Array<DrawFaceExpressionsInput>, minConfidence = 0.1, textFieldAnchor?: IPoint) {
canvasArg: string | HTMLCanvasElement,
faceExpressions: DrawFaceExpressionsInput | Array<DrawFaceExpressionsInput>,
minConfidence = 0.1,
textFieldAnchor?: IPoint,
) {
const faceExpressionsArray = Array.isArray(faceExpressions) ? faceExpressions : [faceExpressions]; const faceExpressionsArray = Array.isArray(faceExpressions) ? faceExpressions : [faceExpressions];
faceExpressionsArray.forEach((e) => { faceExpressionsArray.forEach((e) => {

View File

@ -16,6 +16,7 @@ export function createBrowserEnv(): Environment {
Video: HTMLVideoElement, Video: HTMLVideoElement,
createCanvasElement: () => document.createElement('canvas'), createCanvasElement: () => document.createElement('canvas'),
createImageElement: () => document.createElement('img'), createImageElement: () => document.createElement('img'),
createVideoElement: () => document.createElement('video'),
fetch, fetch,
readFile, readFile,
}; };

View File

@ -1,26 +1,20 @@
import { FileSystem } from './types'; import { FileSystem } from './types';
import { isNodejs } from './isNodejs';
export function createFileSystem(fs?: any): FileSystem { export function createFileSystem(fs?: any): FileSystem {
let requireFsError = ''; let requireFsError = '';
if (!fs && isNodejs()) {
if (!fs) {
try { try {
// eslint-disable-next-line global-require // eslint-disable-next-line global-require, @typescript-eslint/no-require-imports
fs = require('fs'); fs = require('fs');
} catch (err) { } catch (err) {
requireFsError = err.toString(); requireFsError = (err as any).toString();
} }
} }
const readFile = fs const readFile = fs
? (filePath: string) => new Promise<Buffer>((resolve, reject) => { // eslint-disable-next-line no-undef
fs.readFile(filePath, (err: any, buffer: Buffer) => (err ? reject(err) : resolve(buffer))); ? (filePath: string) => new Promise<string | Buffer>((resolve, reject) => { fs.readFile(filePath, (err: NodeJS.ErrnoException | null, buffer: string | Buffer) => (err ? reject(err) : resolve(buffer))); })
}) : () => { throw new Error(`readFile - failed to require fs in nodejs environment with error: ${requireFsError}`); };
: () => { return { readFile };
throw new Error(`readFile - failed to require fs in nodejs environment with error: ${requireFsError}`);
};
return {
readFile,
};
} }

View File

@ -3,9 +3,9 @@ import { createFileSystem } from './createFileSystem';
import { Environment } from './types'; import { Environment } from './types';
export function createNodejsEnv(): Environment { export function createNodejsEnv(): Environment {
// eslint-disable-next-line dot-notation const Canvas: (new () => HTMLCanvasElement) = (global as any)['Canvas'] || global.HTMLCanvasElement;
const Canvas = global['Canvas'] || global.HTMLCanvasElement;
const Image = global.Image || global.HTMLImageElement; const Image = global.Image || global.HTMLImageElement;
const Video: (new () => HTMLVideoElement) = (global as any)['Video'] || global.HTMLVideoElement;
const createCanvasElement = () => { const createCanvasElement = () => {
if (Canvas) return new Canvas(); if (Canvas) return new Canvas();
@ -17,6 +17,11 @@ export function createNodejsEnv(): Environment {
throw new Error('createImageElement - missing Image implementation for nodejs environment'); throw new Error('createImageElement - missing Image implementation for nodejs environment');
}; };
const createVideoElement = () => {
if (Video) return new Video();
throw new Error('createVideoElement - missing Video implementation for nodejs environment');
};
const fetch = global.fetch; const fetch = global.fetch;
// if (!fetch) throw new Error('fetch - missing fetch implementation for nodejs environment'); // if (!fetch) throw new Error('fetch - missing fetch implementation for nodejs environment');
@ -30,6 +35,7 @@ export function createNodejsEnv(): Environment {
Video: global.HTMLVideoElement || class {}, Video: global.HTMLVideoElement || class {},
createCanvasElement, createCanvasElement,
createImageElement, createImageElement,
createVideoElement,
fetch, fetch,
...fileSystem, ...fileSystem,
}; };

6
src/env/isNodejs.ts vendored
View File

@ -1,6 +1,6 @@
export function isNodejs(): boolean { export function isNodejs(): boolean {
return typeof global === 'object' return typeof global === 'object'
&& typeof require === 'function' && typeof process !== 'undefined'
&& typeof module !== 'undefined' && process.versions != null
&& typeof process !== 'undefined' && !!process.version; && process.versions.node != null;
} }

23
src/env/types.ts vendored
View File

@ -1,16 +1,17 @@
export type FileSystem = { export type FileSystem = {
// eslint-disable-next-line no-unused-vars // eslint-disable-next-line no-unused-vars
readFile: (filePath: string) => Promise<Buffer> readFile: (filePath: string) => Promise<string | Buffer>;
} };
export type Environment = FileSystem & { export type Environment = FileSystem & {
Canvas: typeof HTMLCanvasElement Canvas: typeof HTMLCanvasElement;
CanvasRenderingContext2D: typeof CanvasRenderingContext2D CanvasRenderingContext2D: typeof CanvasRenderingContext2D;
Image: typeof HTMLImageElement Image: typeof HTMLImageElement;
ImageData: typeof ImageData ImageData: typeof ImageData;
Video: typeof HTMLVideoElement Video: typeof HTMLVideoElement;
createCanvasElement: () => HTMLCanvasElement createCanvasElement: () => HTMLCanvasElement;
createImageElement: () => HTMLImageElement createImageElement: () => HTMLImageElement;
createVideoElement: () => HTMLVideoElement;
// eslint-disable-next-line no-undef, no-unused-vars // eslint-disable-next-line no-undef, no-unused-vars
fetch: (url: string, init?: RequestInit) => Promise<Response> fetch: (url: string, init?: RequestInit) => Promise<Response>;
} };

View File

@ -1,12 +1,10 @@
export function euclideanDistance(arr1: number[] | Float32Array, arr2: number[] | Float32Array) { export function euclideanDistance(arr1: number[] | Float32Array, arr2: number[] | Float32Array) {
if (arr1.length !== arr2.length) throw new Error('euclideanDistance: arr1.length !== arr2.length'); if (arr1.length !== arr2.length) throw new Error('euclideanDistance: arr1.length !== arr2.length');
const desc1 = Array.from(arr1); const desc1 = Array.from(arr1);
const desc2 = Array.from(arr2); const desc2 = Array.from(arr2);
return Math.sqrt( return Math.sqrt(
desc1 desc1
.map((val, i) => val - desc2[i]) .map((val, i) => val - desc2[i])
.reduce((res, diff) => res + (diff ** 2), 0), .reduce((res, diff) => res + (diff * diff), 0),
); );
} }

View File

@ -1,19 +1,13 @@
export const FACE_EXPRESSION_LABELS = ['neutral', 'happy', 'sad', 'angry', 'fearful', 'disgusted', 'surprised']; export const FACE_EXPRESSION_LABELS = ['neutral', 'happy', 'sad', 'angry', 'fearful', 'disgusted', 'surprised'] as const;
export class FaceExpressions { export class FaceExpressions {
public neutral: number public neutral = 0;
public happy = 0;
public happy: number public sad = 0;
public angry = 0;
public sad: number public fearful = 0;
public disgusted = 0;
public angry: number public surprised = 0;
public fearful: number
public disgusted: number
public surprised: number
constructor(probabilities: number[] | Float32Array) { constructor(probabilities: number[] | Float32Array) {
if (probabilities.length !== 7) { if (probabilities.length !== 7) {

View File

@ -7,7 +7,7 @@ import { DenseBlock3Params, DenseBlock4Params } from './types';
export function denseBlock3( export function denseBlock3(
x: tf.Tensor4D, x: tf.Tensor4D,
denseBlockParams: DenseBlock3Params, denseBlockParams: DenseBlock3Params,
isFirstLayer: boolean = false, isFirstLayer = false,
): tf.Tensor4D { ): tf.Tensor4D {
return tf.tidy(() => { return tf.tidy(() => {
const out1 = tf.relu( const out1 = tf.relu(
@ -30,8 +30,8 @@ export function denseBlock3(
export function denseBlock4( export function denseBlock4(
x: tf.Tensor4D, x: tf.Tensor4D,
denseBlockParams: DenseBlock4Params, denseBlockParams: DenseBlock4Params,
isFirstLayer: boolean = false, isFirstLayer = false,
isScaleDown: boolean = true, isScaleDown = true,
): tf.Tensor4D { ): tf.Tensor4D {
return tf.tidy(() => { return tf.tidy(() => {
const out1 = tf.relu( const out1 = tf.relu(

View File

@ -5,7 +5,7 @@ export function extractorsFactory(extractWeights: ExtractWeightsFunction, paramM
const extractConvParams = extractConvParamsFactory(extractWeights, paramMappings); const extractConvParams = extractConvParamsFactory(extractWeights, paramMappings);
const extractSeparableConvParams = extractSeparableConvParamsFactory(extractWeights, paramMappings); const extractSeparableConvParams = extractSeparableConvParamsFactory(extractWeights, paramMappings);
function extractDenseBlock3Params(channelsIn: number, channelsOut: number, mappedPrefix: string, isFirstLayer: boolean = false): DenseBlock3Params { function extractDenseBlock3Params(channelsIn: number, channelsOut: number, mappedPrefix: string, isFirstLayer = false): DenseBlock3Params {
const conv0 = isFirstLayer const conv0 = isFirstLayer
? extractConvParams(channelsIn, channelsOut, 3, `${mappedPrefix}/conv0`) ? extractConvParams(channelsIn, channelsOut, 3, `${mappedPrefix}/conv0`)
: extractSeparableConvParams(channelsIn, channelsOut, `${mappedPrefix}/conv0`); : extractSeparableConvParams(channelsIn, channelsOut, `${mappedPrefix}/conv0`);
@ -15,7 +15,7 @@ export function extractorsFactory(extractWeights: ExtractWeightsFunction, paramM
return { conv0, conv1, conv2 }; return { conv0, conv1, conv2 };
} }
function extractDenseBlock4Params(channelsIn: number, channelsOut: number, mappedPrefix: string, isFirstLayer: boolean = false): DenseBlock4Params { function extractDenseBlock4Params(channelsIn: number, channelsOut: number, mappedPrefix: string, isFirstLayer = false): DenseBlock4Params {
const { conv0, conv1, conv2 } = extractDenseBlock3Params(channelsIn, channelsOut, mappedPrefix, isFirstLayer); const { conv0, conv1, conv2 } = extractDenseBlock3Params(channelsIn, channelsOut, mappedPrefix, isFirstLayer);
const conv3 = extractSeparableConvParams(channelsOut, channelsOut, `${mappedPrefix}/conv3`); const conv3 = extractSeparableConvParams(channelsOut, channelsOut, `${mappedPrefix}/conv3`);

View File

@ -8,7 +8,7 @@ export function loadParamsFactory(weightMap: any, paramMappings: ParamMapping[])
const extractConvParams = loadConvParamsFactory(extractWeightEntry); const extractConvParams = loadConvParamsFactory(extractWeightEntry);
const extractSeparableConvParams = loadSeparableConvParamsFactory(extractWeightEntry); const extractSeparableConvParams = loadSeparableConvParamsFactory(extractWeightEntry);
function extractDenseBlock3Params(prefix: string, isFirstLayer: boolean = false): DenseBlock3Params { function extractDenseBlock3Params(prefix: string, isFirstLayer = false): DenseBlock3Params {
const conv0 = isFirstLayer const conv0 = isFirstLayer
? extractConvParams(`${prefix}/conv0`) ? extractConvParams(`${prefix}/conv0`)
: extractSeparableConvParams(`${prefix}/conv0`); : extractSeparableConvParams(`${prefix}/conv0`);
@ -18,7 +18,7 @@ export function loadParamsFactory(weightMap: any, paramMappings: ParamMapping[])
return { conv0, conv1, conv2 }; return { conv0, conv1, conv2 };
} }
function extractDenseBlock4Params(prefix: string, isFirstLayer: boolean = false): DenseBlock4Params { function extractDenseBlock4Params(prefix: string, isFirstLayer = false): DenseBlock4Params {
const conv0 = isFirstLayer const conv0 = isFirstLayer
? extractConvParams(`${prefix}/conv0`) ? extractConvParams(`${prefix}/conv0`)
: extractSeparableConvParams(`${prefix}/conv0`); : extractSeparableConvParams(`${prefix}/conv0`);

View File

@ -13,7 +13,7 @@ export abstract class FaceProcessor<
TExtractorParams extends FaceFeatureExtractorParams | TinyFaceFeatureExtractorParams TExtractorParams extends FaceFeatureExtractorParams | TinyFaceFeatureExtractorParams
> >
extends NeuralNetwork<NetParams> { extends NeuralNetwork<NetParams> {
protected _faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams> protected _faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>;
constructor(_name: string, faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>) { constructor(_name: string, faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>) {
super(_name); super(_name);
@ -24,7 +24,7 @@ export abstract class FaceProcessor<
return this._faceFeatureExtractor; return this._faceFeatureExtractor;
} }
protected abstract getDefaultModelName(): string protected abstract override getDefaultModelName(): string
protected abstract getClassifierChannelsIn(): number protected abstract getClassifierChannelsIn(): number
@ -45,7 +45,7 @@ export abstract class FaceProcessor<
}); });
} }
public dispose(throwOnRedispose: boolean = true) { public override dispose(throwOnRedispose = true) {
this.faceFeatureExtractor.dispose(throwOnRedispose); this.faceFeatureExtractor.dispose(throwOnRedispose);
super.dispose(throwOnRedispose); super.dispose(throwOnRedispose);
} }

View File

@ -51,7 +51,7 @@ export class FaceRecognitionNet extends NeuralNetwork<NetParams> {
const globalAvg = out.mean([1, 2]) as tf.Tensor2D; const globalAvg = out.mean([1, 2]) as tf.Tensor2D;
const fullyConnected = tf.matMul(globalAvg, params.fc); const fullyConnected = tf.matMul(globalAvg, params.fc);
return fullyConnected; return fullyConnected as tf.Tensor2D;
}); });
} }
@ -60,6 +60,7 @@ export class FaceRecognitionNet extends NeuralNetwork<NetParams> {
} }
public async computeFaceDescriptor(input: TNetInput): Promise<Float32Array|Float32Array[]> { public async computeFaceDescriptor(input: TNetInput): Promise<Float32Array|Float32Array[]> {
// @ts-ignore
if (input?.shape?.some((dim) => dim <= 0)) return new Float32Array(128); if (input?.shape?.some((dim) => dim <= 0)) return new Float32Array(128);
const netInput = await toNetInput(input); const netInput = await toNetInput(input);
const faceDescriptorTensors = tf.tidy(() => tf.unstack(this.forwardInput(netInput))); const faceDescriptorTensors = tf.tidy(() => tf.unstack(this.forwardInput(netInput)));

Some files were not shown because too many files have changed in this diff Show More