initial commit
parent
1ff72da23c
commit
08df1073e6
|
@ -0,0 +1,3 @@
|
||||||
|
node_modules
|
||||||
|
build
|
||||||
|
dist
|
2
LICENSE
2
LICENSE
|
@ -1,6 +1,6 @@
|
||||||
MIT License
|
MIT License
|
||||||
|
|
||||||
Copyright (c) 2020 Vladimir Mandic
|
Copyright (c) 2018 Vincent Mühler
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
|
36
README.md
36
README.md
|
@ -1,2 +1,34 @@
|
||||||
# face-api
|
# FaceAPI
|
||||||
Branched from https://github.com/justadudewhohacks/face-api.js
|
|
||||||
|
## Branch Point
|
||||||
|
|
||||||
|
Branched from face-api.js version **0.22.2** released on March 22nd, 2020
|
||||||
|
<https://github.com/justadudewhohacks/face-api.js>
|
||||||
|
<https://www.npmjs.com/package/face-api.js>
|
||||||
|
|
||||||
|
## Note
|
||||||
|
|
||||||
|
I don't plan to maintain a separate distribution of face-api, this is only a temporary repository to use latest available face-api with latest available tensorflow/js.
|
||||||
|
If original repository is updated, this one will become obsolete.
|
||||||
|
|
||||||
|
## Differences
|
||||||
|
|
||||||
|
- Removed tests, docs, examples
|
||||||
|
- Updated all package dependencies
|
||||||
|
- Removed unnecesary package dependencies (karma, jasmine, etc.)
|
||||||
|
- Updated Typescript build process to target ES2020 instead of dual ES5/ES6
|
||||||
|
- Modified to make compatible with TensorFlow/JS 2.0+
|
||||||
|
- Trivial code changes for updated Typescript type checking
|
||||||
|
- Changed browser bundle process to use ESBuild instead of Rollup
|
||||||
|
- Updated tfjs dependencies since backends were removed from tfjs-core
|
||||||
|
|
||||||
|
- Removed following models as they are either obsolete or non-functional with tfjs 2.0+
|
||||||
|
- ssdMobilenetv1: Relies on batchNorm() function which is made obsolete in tfjs 2.0
|
||||||
|
- mtcnn: Mostly obsolete
|
||||||
|
- tinyYolov2: Non-functional since weights are missing
|
||||||
|
|
||||||
|
Which means the only valid model is **tinyFaceDetector**
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
|
||||||
|
For documentation refer to original project
|
||||||
|
|
|
@ -0,0 +1,913 @@
|
||||||
|
{
|
||||||
|
"name": "face-api",
|
||||||
|
"version": "0.3.0",
|
||||||
|
"lockfileVersion": 1,
|
||||||
|
"requires": true,
|
||||||
|
"dependencies": {
|
||||||
|
"@tensorflow/tfjs": {
|
||||||
|
"version": "2.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs/-/tfjs-2.3.0.tgz",
|
||||||
|
"integrity": "sha512-84vrkbktqnPYESZZOT2AN/LFm3qCF0UleGI7P+wgdsOYcwBoe8eZcVdhje/u9O4GYqwmn0+a8F+Kk4TmcFdrEw==",
|
||||||
|
"requires": {
|
||||||
|
"@tensorflow/tfjs-backend-cpu": "2.3.0",
|
||||||
|
"@tensorflow/tfjs-backend-webgl": "2.3.0",
|
||||||
|
"@tensorflow/tfjs-converter": "2.3.0",
|
||||||
|
"@tensorflow/tfjs-core": "2.3.0",
|
||||||
|
"@tensorflow/tfjs-data": "2.3.0",
|
||||||
|
"@tensorflow/tfjs-layers": "2.3.0",
|
||||||
|
"argparse": "^1.0.10",
|
||||||
|
"chalk": "^4.1.0",
|
||||||
|
"core-js": "3",
|
||||||
|
"regenerator-runtime": "^0.13.5"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"@tensorflow/tfjs-backend-cpu": {
|
||||||
|
"version": "2.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-cpu/-/tfjs-backend-cpu-2.3.0.tgz",
|
||||||
|
"integrity": "sha512-Ycf8mZywZYJrRIcZBqrloDxBc/lmV6aLLfj18wo8WE4aX9UxLNmSXF2iSs+x8icx7YmvLLglvTVST6tzL4sUSg==",
|
||||||
|
"requires": {
|
||||||
|
"@types/seedrandom": "2.4.27",
|
||||||
|
"seedrandom": "2.4.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"@tensorflow/tfjs-backend-webgl": {
|
||||||
|
"version": "2.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-webgl/-/tfjs-backend-webgl-2.3.0.tgz",
|
||||||
|
"integrity": "sha512-xTwAAASFPNHpIyShynCc+4Z2JtruJvEUosV28TDCNv9ZNuarxxpGimTBohjKFqzcjGIi4II0NyXbjMxTJcJkqw==",
|
||||||
|
"requires": {
|
||||||
|
"@tensorflow/tfjs-backend-cpu": "2.3.0",
|
||||||
|
"@types/offscreencanvas": "~2019.3.0",
|
||||||
|
"@types/seedrandom": "2.4.27",
|
||||||
|
"@types/webgl-ext": "0.0.30",
|
||||||
|
"@types/webgl2": "0.0.4",
|
||||||
|
"seedrandom": "2.4.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"@tensorflow/tfjs-converter": {
|
||||||
|
"version": "2.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-converter/-/tfjs-converter-2.3.0.tgz",
|
||||||
|
"integrity": "sha512-Q6rXAhL4XN9jo8bnXnDoc3sB3dC5T/gIWnLX+DGmDfgIARBCKG+QZdemBvg64qw48gdK6oxd75aaTsvCjuQ6ww=="
|
||||||
|
},
|
||||||
|
"@tensorflow/tfjs-core": {
|
||||||
|
"version": "2.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-2.3.0.tgz",
|
||||||
|
"integrity": "sha512-XJ8B/VQexYVtyyoIUCqzIbEgwaWZzv9lq1E4LKafJeuT9DldJGHS7IWIkNnZDBfmQvfYHU+fAvGRRPdQPoYeRg==",
|
||||||
|
"requires": {
|
||||||
|
"@types/offscreencanvas": "~2019.3.0",
|
||||||
|
"@types/seedrandom": "2.4.27",
|
||||||
|
"@types/webgl-ext": "0.0.30",
|
||||||
|
"@types/webgl2": "0.0.4",
|
||||||
|
"node-fetch": "~2.1.2",
|
||||||
|
"seedrandom": "2.4.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"@tensorflow/tfjs-data": {
|
||||||
|
"version": "2.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-data/-/tfjs-data-2.3.0.tgz",
|
||||||
|
"integrity": "sha512-uDSZTDJaoR4axYVate22gspdNJC89iqW9IHf8wC9iMZ0xSBWoX9qChcI3xpTzP5couHxaBSUkFVXczuqvnFsfg==",
|
||||||
|
"requires": {
|
||||||
|
"@types/node-fetch": "^2.1.2",
|
||||||
|
"node-fetch": "~2.1.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"@tensorflow/tfjs-layers": {
|
||||||
|
"version": "2.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-layers/-/tfjs-layers-2.3.0.tgz",
|
||||||
|
"integrity": "sha512-8bXikhI/SvnOxRDELBxVB2kYKH78pQEEeXheMrSD38CA/QoKbH7sdPPmP7qpEz8kzU/k5sXnn8c5CIAFpO7sug=="
|
||||||
|
},
|
||||||
|
"@types/color-name": {
|
||||||
|
"version": "1.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz",
|
||||||
|
"integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ=="
|
||||||
|
},
|
||||||
|
"@types/node": {
|
||||||
|
"version": "14.6.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.6.0.tgz",
|
||||||
|
"integrity": "sha512-mikldZQitV94akrc4sCcSjtJfsTKt4p+e/s0AGscVA6XArQ9kFclP+ZiYUMnq987rc6QlYxXv/EivqlfSLxpKA=="
|
||||||
|
},
|
||||||
|
"@types/node-fetch": {
|
||||||
|
"version": "2.5.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.5.7.tgz",
|
||||||
|
"integrity": "sha512-o2WVNf5UhWRkxlf6eq+jMZDu7kjgpgJfl4xVNlvryc95O/6F2ld8ztKX+qu+Rjyet93WAWm5LjeX9H5FGkODvw==",
|
||||||
|
"requires": {
|
||||||
|
"@types/node": "*",
|
||||||
|
"form-data": "^3.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"@types/offscreencanvas": {
|
||||||
|
"version": "2019.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/offscreencanvas/-/offscreencanvas-2019.3.0.tgz",
|
||||||
|
"integrity": "sha512-esIJx9bQg+QYF0ra8GnvfianIY8qWB0GBx54PK5Eps6m+xTj86KLavHv6qDhzKcu5UUOgNfJ2pWaIIV7TRUd9Q=="
|
||||||
|
},
|
||||||
|
"@types/seedrandom": {
|
||||||
|
"version": "2.4.27",
|
||||||
|
"resolved": "http://registry.npmjs.org/@types/seedrandom/-/seedrandom-2.4.27.tgz",
|
||||||
|
"integrity": "sha1-nbVjk33YaRX2kJK8QyWdL0hXjkE="
|
||||||
|
},
|
||||||
|
"@types/webgl-ext": {
|
||||||
|
"version": "0.0.30",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/webgl-ext/-/webgl-ext-0.0.30.tgz",
|
||||||
|
"integrity": "sha512-LKVgNmBxN0BbljJrVUwkxwRYqzsAEPcZOe6S2T6ZaBDIrFp0qu4FNlpc5sM1tGbXUYFgdVQIoeLk1Y1UoblyEg=="
|
||||||
|
},
|
||||||
|
"@types/webgl2": {
|
||||||
|
"version": "0.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/webgl2/-/webgl2-0.0.4.tgz",
|
||||||
|
"integrity": "sha512-PACt1xdErJbMUOUweSrbVM7gSIYm1vTncW2hF6Os/EeWi6TXYAYMPp+8v6rzHmypE5gHrxaxZNXgMkJVIdZpHw=="
|
||||||
|
},
|
||||||
|
"abbrev": {
|
||||||
|
"version": "1.0.9",
|
||||||
|
"resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.9.tgz",
|
||||||
|
"integrity": "sha1-kbR5JYinc4wl813W9jdSovh3YTU=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"ansi-regex": {
|
||||||
|
"version": "2.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
|
||||||
|
"integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"ansi-styles": {
|
||||||
|
"version": "4.2.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz",
|
||||||
|
"integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==",
|
||||||
|
"requires": {
|
||||||
|
"@types/color-name": "^1.1.1",
|
||||||
|
"color-convert": "^2.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"aproba": {
|
||||||
|
"version": "1.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz",
|
||||||
|
"integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"are-we-there-yet": {
|
||||||
|
"version": "1.1.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz",
|
||||||
|
"integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"delegates": "^1.0.0",
|
||||||
|
"readable-stream": "^2.0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"arg": {
|
||||||
|
"version": "4.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
|
||||||
|
"integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"argparse": {
|
||||||
|
"version": "1.0.10",
|
||||||
|
"resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
|
||||||
|
"integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
|
||||||
|
"requires": {
|
||||||
|
"sprintf-js": "~1.0.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"asynckit": {
|
||||||
|
"version": "0.4.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||||
|
"integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
|
||||||
|
},
|
||||||
|
"balanced-match": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
|
||||||
|
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"brace-expansion": {
|
||||||
|
"version": "1.1.11",
|
||||||
|
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||||
|
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"balanced-match": "^1.0.0",
|
||||||
|
"concat-map": "0.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"buffer-from": {
|
||||||
|
"version": "1.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz",
|
||||||
|
"integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"canvas": {
|
||||||
|
"version": "2.6.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/canvas/-/canvas-2.6.1.tgz",
|
||||||
|
"integrity": "sha512-S98rKsPcuhfTcYbtF53UIJhcbgIAK533d1kJKMwsMwAIFgfd58MOyxRud3kktlzWiEkFliaJtvyZCBtud/XVEA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"nan": "^2.14.0",
|
||||||
|
"node-pre-gyp": "^0.11.0",
|
||||||
|
"simple-get": "^3.0.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"chalk": {
|
||||||
|
"version": "4.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz",
|
||||||
|
"integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==",
|
||||||
|
"requires": {
|
||||||
|
"ansi-styles": "^4.1.0",
|
||||||
|
"supports-color": "^7.1.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"chownr": {
|
||||||
|
"version": "1.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.3.tgz",
|
||||||
|
"integrity": "sha512-i70fVHhmV3DtTl6nqvZOnIjbY0Pe4kAUjwHj8z0zAdgBtYrJyYwLKCCuRBQ5ppkyL0AkN7HKRnETdmdp1zqNXw==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"code-point-at": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz",
|
||||||
|
"integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"color-convert": {
|
||||||
|
"version": "2.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||||
|
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
||||||
|
"requires": {
|
||||||
|
"color-name": "~1.1.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"color-name": {
|
||||||
|
"version": "1.1.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
||||||
|
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
|
||||||
|
},
|
||||||
|
"combined-stream": {
|
||||||
|
"version": "1.0.8",
|
||||||
|
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||||
|
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
||||||
|
"requires": {
|
||||||
|
"delayed-stream": "~1.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"concat-map": {
|
||||||
|
"version": "0.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
||||||
|
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"console-control-strings": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
|
||||||
|
"integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"core-js": {
|
||||||
|
"version": "3.6.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/core-js/-/core-js-3.6.5.tgz",
|
||||||
|
"integrity": "sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA=="
|
||||||
|
},
|
||||||
|
"core-util-is": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
|
||||||
|
"integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"debug": {
|
||||||
|
"version": "3.2.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz",
|
||||||
|
"integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"ms": "^2.1.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"decompress-response": {
|
||||||
|
"version": "4.2.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-4.2.1.tgz",
|
||||||
|
"integrity": "sha512-jOSne2qbyE+/r8G1VU+G/82LBs2Fs4LAsTiLSHOCOMZQl2OKZ6i8i4IyHemTe+/yIXOtTcRQMzPcgyhoFlqPkw==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"mimic-response": "^2.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"deep-extend": {
|
||||||
|
"version": "0.6.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz",
|
||||||
|
"integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"delayed-stream": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
||||||
|
"integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk="
|
||||||
|
},
|
||||||
|
"delegates": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
|
||||||
|
"integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"detect-libc": {
|
||||||
|
"version": "1.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
|
||||||
|
"integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"diff": {
|
||||||
|
"version": "4.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
|
||||||
|
"integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"esbuild": {
|
||||||
|
"version": "0.6.25",
|
||||||
|
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.6.25.tgz",
|
||||||
|
"integrity": "sha512-yOSDMlo0tmhF023LL37UVK/WHIM8UttxiUM0iOHb/DgB9Mk02wQAAHEso2ZkTY3ImpGDRQM0JnIif7aZAmGm2w=="
|
||||||
|
},
|
||||||
|
"form-data": {
|
||||||
|
"version": "3.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.0.tgz",
|
||||||
|
"integrity": "sha512-CKMFDglpbMi6PyN+brwB9Q/GOw0eAnsrEZDgcsH5Krhz5Od/haKHAX0NmQfha2zPPz0JpWzA7GJHGSnvCRLWsg==",
|
||||||
|
"requires": {
|
||||||
|
"asynckit": "^0.4.0",
|
||||||
|
"combined-stream": "^1.0.8",
|
||||||
|
"mime-types": "^2.1.12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"fs-minipass": {
|
||||||
|
"version": "1.2.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz",
|
||||||
|
"integrity": "sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"minipass": "^2.6.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"fs.realpath": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
|
||||||
|
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"gauge": {
|
||||||
|
"version": "2.7.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz",
|
||||||
|
"integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"aproba": "^1.0.3",
|
||||||
|
"console-control-strings": "^1.0.0",
|
||||||
|
"has-unicode": "^2.0.0",
|
||||||
|
"object-assign": "^4.1.0",
|
||||||
|
"signal-exit": "^3.0.0",
|
||||||
|
"string-width": "^1.0.1",
|
||||||
|
"strip-ansi": "^3.0.1",
|
||||||
|
"wide-align": "^1.1.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"glob": {
|
||||||
|
"version": "7.1.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
|
||||||
|
"integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"fs.realpath": "^1.0.0",
|
||||||
|
"inflight": "^1.0.4",
|
||||||
|
"inherits": "2",
|
||||||
|
"minimatch": "^3.0.4",
|
||||||
|
"once": "^1.3.0",
|
||||||
|
"path-is-absolute": "^1.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"has-flag": {
|
||||||
|
"version": "4.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
|
||||||
|
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="
|
||||||
|
},
|
||||||
|
"has-unicode": {
|
||||||
|
"version": "2.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
|
||||||
|
"integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"iconv-lite": {
|
||||||
|
"version": "0.4.24",
|
||||||
|
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
||||||
|
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"safer-buffer": ">= 2.1.2 < 3"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"ignore-walk": {
|
||||||
|
"version": "3.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.3.tgz",
|
||||||
|
"integrity": "sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"minimatch": "^3.0.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"inflight": {
|
||||||
|
"version": "1.0.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
|
||||||
|
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"once": "^1.3.0",
|
||||||
|
"wrappy": "1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"inherits": {
|
||||||
|
"version": "2.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
|
||||||
|
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"ini": {
|
||||||
|
"version": "1.3.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz",
|
||||||
|
"integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"is-fullwidth-code-point": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz",
|
||||||
|
"integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"number-is-nan": "^1.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"isarray": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
|
||||||
|
"integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"make-error": {
|
||||||
|
"version": "1.3.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
|
||||||
|
"integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"mime-db": {
|
||||||
|
"version": "1.44.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.44.0.tgz",
|
||||||
|
"integrity": "sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg=="
|
||||||
|
},
|
||||||
|
"mime-types": {
|
||||||
|
"version": "2.1.27",
|
||||||
|
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.27.tgz",
|
||||||
|
"integrity": "sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w==",
|
||||||
|
"requires": {
|
||||||
|
"mime-db": "1.44.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"mimic-response": {
|
||||||
|
"version": "2.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-2.1.0.tgz",
|
||||||
|
"integrity": "sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"minimatch": {
|
||||||
|
"version": "3.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
|
||||||
|
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"brace-expansion": "^1.1.7"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"minimist": {
|
||||||
|
"version": "1.2.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
|
||||||
|
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"minipass": {
|
||||||
|
"version": "2.9.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz",
|
||||||
|
"integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"safe-buffer": "^5.1.2",
|
||||||
|
"yallist": "^3.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"minizlib": {
|
||||||
|
"version": "1.3.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.3.3.tgz",
|
||||||
|
"integrity": "sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"minipass": "^2.9.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"mkdirp": {
|
||||||
|
"version": "0.5.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz",
|
||||||
|
"integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"minimist": "^1.2.5"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"ms": {
|
||||||
|
"version": "2.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||||
|
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"nan": {
|
||||||
|
"version": "2.14.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.1.tgz",
|
||||||
|
"integrity": "sha512-isWHgVjnFjh2x2yuJ/tj3JbwoHu3UC2dX5G/88Cm24yB6YopVgxvBObDY7n5xW6ExmFhJpSEQqFPvq9zaXc8Jw==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"needle": {
|
||||||
|
"version": "2.4.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/needle/-/needle-2.4.0.tgz",
|
||||||
|
"integrity": "sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"debug": "^3.2.6",
|
||||||
|
"iconv-lite": "^0.4.4",
|
||||||
|
"sax": "^1.2.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node-fetch": {
|
||||||
|
"version": "2.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.1.2.tgz",
|
||||||
|
"integrity": "sha1-q4hOjn5X44qUR1POxwb3iNF2i7U="
|
||||||
|
},
|
||||||
|
"node-pre-gyp": {
|
||||||
|
"version": "0.11.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.11.0.tgz",
|
||||||
|
"integrity": "sha512-TwWAOZb0j7e9eGaf9esRx3ZcLaE5tQ2lvYy1pb5IAaG1a2e2Kv5Lms1Y4hpj+ciXJRofIxxlt5haeQ/2ANeE0Q==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"detect-libc": "^1.0.2",
|
||||||
|
"mkdirp": "^0.5.1",
|
||||||
|
"needle": "^2.2.1",
|
||||||
|
"nopt": "^4.0.1",
|
||||||
|
"npm-packlist": "^1.1.6",
|
||||||
|
"npmlog": "^4.0.2",
|
||||||
|
"rc": "^1.2.7",
|
||||||
|
"rimraf": "^2.6.1",
|
||||||
|
"semver": "^5.3.0",
|
||||||
|
"tar": "^4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nopt": {
|
||||||
|
"version": "4.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.1.tgz",
|
||||||
|
"integrity": "sha1-0NRoWv1UFRk8jHUFYC0NF81kR00=",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"abbrev": "1",
|
||||||
|
"osenv": "^0.1.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"npm-bundled": {
|
||||||
|
"version": "1.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.1.tgz",
|
||||||
|
"integrity": "sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"npm-normalize-package-bin": "^1.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"npm-normalize-package-bin": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"npm-packlist": {
|
||||||
|
"version": "1.4.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.4.7.tgz",
|
||||||
|
"integrity": "sha512-vAj7dIkp5NhieaGZxBJB8fF4R0078rqsmhJcAfXZ6O7JJhjhPK96n5Ry1oZcfLXgfun0GWTZPOxaEyqv8GBykQ==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"ignore-walk": "^3.0.1",
|
||||||
|
"npm-bundled": "^1.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"npmlog": {
|
||||||
|
"version": "4.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz",
|
||||||
|
"integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"are-we-there-yet": "~1.1.2",
|
||||||
|
"console-control-strings": "~1.1.0",
|
||||||
|
"gauge": "~2.7.3",
|
||||||
|
"set-blocking": "~2.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"number-is-nan": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz",
|
||||||
|
"integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"object-assign": {
|
||||||
|
"version": "4.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
|
||||||
|
"integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"once": {
|
||||||
|
"version": "1.4.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
|
||||||
|
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"wrappy": "1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"os-homedir": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz",
|
||||||
|
"integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"os-tmpdir": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
|
||||||
|
"integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"osenv": {
|
||||||
|
"version": "0.1.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz",
|
||||||
|
"integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"os-homedir": "^1.0.0",
|
||||||
|
"os-tmpdir": "^1.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"path-is-absolute": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
|
||||||
|
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"process-nextick-args": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"rc": {
|
||||||
|
"version": "1.2.8",
|
||||||
|
"resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",
|
||||||
|
"integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"deep-extend": "^0.6.0",
|
||||||
|
"ini": "~1.3.0",
|
||||||
|
"minimist": "^1.2.0",
|
||||||
|
"strip-json-comments": "~2.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"readable-stream": {
|
||||||
|
"version": "2.3.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz",
|
||||||
|
"integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"core-util-is": "~1.0.0",
|
||||||
|
"inherits": "~2.0.3",
|
||||||
|
"isarray": "~1.0.0",
|
||||||
|
"process-nextick-args": "~2.0.0",
|
||||||
|
"safe-buffer": "~5.1.1",
|
||||||
|
"string_decoder": "~1.1.1",
|
||||||
|
"util-deprecate": "~1.0.1"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"safe-buffer": {
|
||||||
|
"version": "5.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
|
||||||
|
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==",
|
||||||
|
"dev": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"regenerator-runtime": {
|
||||||
|
"version": "0.13.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz",
|
||||||
|
"integrity": "sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew=="
|
||||||
|
},
|
||||||
|
"rimraf": {
|
||||||
|
"version": "2.7.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
|
||||||
|
"integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"glob": "^7.1.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"safe-buffer": {
|
||||||
|
"version": "5.2.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||||
|
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"safer-buffer": {
|
||||||
|
"version": "2.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
|
||||||
|
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"sax": {
|
||||||
|
"version": "1.2.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
|
||||||
|
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"seedrandom": {
|
||||||
|
"version": "2.4.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-2.4.3.tgz",
|
||||||
|
"integrity": "sha1-JDhQTa0zkXMUv/GKxNeU8W1qrsw="
|
||||||
|
},
|
||||||
|
"semver": {
|
||||||
|
"version": "5.7.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
|
||||||
|
"integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"set-blocking": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
|
||||||
|
"integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"signal-exit": {
|
||||||
|
"version": "3.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz",
|
||||||
|
"integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"simple-concat": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.0.tgz",
|
||||||
|
"integrity": "sha1-c0TLuLbib7J9ZrL8hvn21Zl1IcY=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"simple-get": {
|
||||||
|
"version": "3.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/simple-get/-/simple-get-3.1.0.tgz",
|
||||||
|
"integrity": "sha512-bCR6cP+aTdScaQCnQKbPKtJOKDp/hj9EDLJo3Nw4y1QksqaovlW/bnptB6/c1e+qmNIDHRK+oXFDdEqBT8WzUA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"decompress-response": "^4.2.0",
|
||||||
|
"once": "^1.3.1",
|
||||||
|
"simple-concat": "^1.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"source-map": {
|
||||||
|
"version": "0.6.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||||
|
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"source-map-support": {
|
||||||
|
"version": "0.5.19",
|
||||||
|
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz",
|
||||||
|
"integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"buffer-from": "^1.0.0",
|
||||||
|
"source-map": "^0.6.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"sprintf-js": {
|
||||||
|
"version": "1.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
|
||||||
|
"integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw="
|
||||||
|
},
|
||||||
|
"string-width": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz",
|
||||||
|
"integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"code-point-at": "^1.0.0",
|
||||||
|
"is-fullwidth-code-point": "^1.0.0",
|
||||||
|
"strip-ansi": "^3.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"string_decoder": {
|
||||||
|
"version": "1.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
|
||||||
|
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"safe-buffer": "~5.1.0"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"safe-buffer": {
|
||||||
|
"version": "5.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
|
||||||
|
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==",
|
||||||
|
"dev": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"strip-ansi": {
|
||||||
|
"version": "3.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz",
|
||||||
|
"integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"ansi-regex": "^2.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"strip-json-comments": {
|
||||||
|
"version": "2.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
|
||||||
|
"integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"supports-color": {
|
||||||
|
"version": "7.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz",
|
||||||
|
"integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==",
|
||||||
|
"requires": {
|
||||||
|
"has-flag": "^4.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"tar": {
|
||||||
|
"version": "4.4.13",
|
||||||
|
"resolved": "https://registry.npmjs.org/tar/-/tar-4.4.13.tgz",
|
||||||
|
"integrity": "sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"chownr": "^1.1.1",
|
||||||
|
"fs-minipass": "^1.2.5",
|
||||||
|
"minipass": "^2.8.6",
|
||||||
|
"minizlib": "^1.2.1",
|
||||||
|
"mkdirp": "^0.5.0",
|
||||||
|
"safe-buffer": "^5.1.2",
|
||||||
|
"yallist": "^3.0.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"ts-node": {
|
||||||
|
"version": "8.10.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-8.10.2.tgz",
|
||||||
|
"integrity": "sha512-ISJJGgkIpDdBhWVu3jufsWpK3Rzo7bdiIXJjQc0ynKxVOVcg2oIrf2H2cejminGrptVc6q6/uynAHNCuWGbpVA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"arg": "^4.1.0",
|
||||||
|
"diff": "^4.0.1",
|
||||||
|
"make-error": "^1.1.1",
|
||||||
|
"source-map-support": "^0.5.17",
|
||||||
|
"yn": "3.1.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"tslib": {
|
||||||
|
"version": "2.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.0.1.tgz",
|
||||||
|
"integrity": "sha512-SgIkNheinmEBgx1IUNirK0TUD4X9yjjBRTqqjggWCU3pUEqIk3/Uwl3yRixYKT6WjQuGiwDv4NomL3wqRCj+CQ==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"typescript": {
|
||||||
|
"version": "3.9.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.7.tgz",
|
||||||
|
"integrity": "sha512-BLbiRkiBzAwsjut4x/dsibSTB6yWpwT5qWmC2OfuCg3GgVQCSgMs4vEctYPhsaGtd0AeuuHMkjZ2h2WG8MSzRw==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"util-deprecate": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
|
||||||
|
"integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"wide-align": {
|
||||||
|
"version": "1.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz",
|
||||||
|
"integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"string-width": "^1.0.2 || 2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"wrappy": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
|
||||||
|
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"yallist": {
|
||||||
|
"version": "3.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
|
||||||
|
"integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"yn": {
|
||||||
|
"version": "3.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
|
||||||
|
"integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==",
|
||||||
|
"dev": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,41 @@
|
||||||
|
{
|
||||||
|
"name": "face-api",
|
||||||
|
"version": "0.3.0",
|
||||||
|
"description": "JavaScript API for face detection and face recognition in the browser with tensorflow.js",
|
||||||
|
"main": "./build/index.js",
|
||||||
|
"typings": "./build/index.d.ts",
|
||||||
|
"scripts": {
|
||||||
|
"compile": "tsc",
|
||||||
|
"bundle": "esbuild --bundle --outfile=./dist/face-api.js --target=es2020 --platform=browser --sourcemap --log-level=error --tsconfig=./tsconfig.json build/index.js",
|
||||||
|
"minify": "esbuild --bundle --outfile=./dist/face-api.min.js --target=es2020 --platform=browser --sourcemap --log-level=error --minify --tsconfig=./tsconfig.json build/index.js",
|
||||||
|
"build": "npm run compile && npm run bundle && npm run minify"
|
||||||
|
},
|
||||||
|
"keywords": [
|
||||||
|
"face",
|
||||||
|
"detection",
|
||||||
|
"recognition",
|
||||||
|
"tensorflow",
|
||||||
|
"tf"
|
||||||
|
],
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/vladmandic/face-api.git"
|
||||||
|
},
|
||||||
|
"author": "Vladimir Mandic <mandic00@live.com>",
|
||||||
|
"license": "MIT",
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/vladmandic/face-api/issues"
|
||||||
|
},
|
||||||
|
"homepage": "https://github.com/vladmandic/face-api#readme",
|
||||||
|
"dependencies": {
|
||||||
|
"@tensorflow/tfjs": "^2.3.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^14.6.0",
|
||||||
|
"canvas": "2.6.1",
|
||||||
|
"ts-node": "^8.10.2",
|
||||||
|
"tslib": "^2.0.1",
|
||||||
|
"typescript": "^3.9.7",
|
||||||
|
"esbuild": "^0.6.25"
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,159 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { ParamMapping } from './common';
|
||||||
|
import { getModelUris } from './common/getModelUris';
|
||||||
|
import { loadWeightMap } from './dom';
|
||||||
|
import { env } from './env';
|
||||||
|
|
||||||
|
export abstract class NeuralNetwork<TNetParams> {
|
||||||
|
|
||||||
|
protected _params: TNetParams | undefined = undefined
|
||||||
|
protected _paramMappings: ParamMapping[] = []
|
||||||
|
|
||||||
|
constructor(protected _name: string) {}
|
||||||
|
|
||||||
|
public get params(): TNetParams | undefined { return this._params }
|
||||||
|
public get paramMappings(): ParamMapping[] { return this._paramMappings }
|
||||||
|
public get isLoaded(): boolean { return !!this.params }
|
||||||
|
|
||||||
|
public getParamFromPath(paramPath: string): tf.Tensor {
|
||||||
|
const { obj, objProp } = this.traversePropertyPath(paramPath)
|
||||||
|
return obj[objProp]
|
||||||
|
}
|
||||||
|
|
||||||
|
public reassignParamFromPath(paramPath: string, tensor: tf.Tensor) {
|
||||||
|
const { obj, objProp } = this.traversePropertyPath(paramPath)
|
||||||
|
obj[objProp].dispose()
|
||||||
|
obj[objProp] = tensor
|
||||||
|
}
|
||||||
|
|
||||||
|
public getParamList() {
|
||||||
|
return this._paramMappings.map(({ paramPath }) => ({
|
||||||
|
path: paramPath,
|
||||||
|
tensor: this.getParamFromPath(paramPath)
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
public getTrainableParams() {
|
||||||
|
return this.getParamList().filter(param => param.tensor instanceof tf.Variable)
|
||||||
|
}
|
||||||
|
|
||||||
|
public getFrozenParams() {
|
||||||
|
return this.getParamList().filter(param => !(param.tensor instanceof tf.Variable))
|
||||||
|
}
|
||||||
|
|
||||||
|
public variable() {
|
||||||
|
this.getFrozenParams().forEach(({ path, tensor }) => {
|
||||||
|
this.reassignParamFromPath(path, tensor.variable())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
public freeze() {
|
||||||
|
this.getTrainableParams().forEach(({ path, tensor: variable }) => {
|
||||||
|
const tensor = tf.tensor(variable.dataSync())
|
||||||
|
variable.dispose()
|
||||||
|
this.reassignParamFromPath(path, tensor)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
public dispose(throwOnRedispose: boolean = true) {
|
||||||
|
this.getParamList().forEach(param => {
|
||||||
|
if (throwOnRedispose && param.tensor.isDisposed) {
|
||||||
|
throw new Error(`param tensor has already been disposed for path ${param.path}`)
|
||||||
|
}
|
||||||
|
param.tensor.dispose()
|
||||||
|
})
|
||||||
|
this._params = undefined
|
||||||
|
}
|
||||||
|
|
||||||
|
public serializeParams(): Float32Array {
|
||||||
|
return new Float32Array(
|
||||||
|
this.getParamList()
|
||||||
|
.map(({ tensor }) => Array.from(tensor.dataSync()) as number[])
|
||||||
|
.reduce((flat, arr) => flat.concat(arr))
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
public async load(weightsOrUrl: Float32Array | string | undefined): Promise<void> {
|
||||||
|
if (weightsOrUrl instanceof Float32Array) {
|
||||||
|
this.extractWeights(weightsOrUrl)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.loadFromUri(weightsOrUrl)
|
||||||
|
}
|
||||||
|
|
||||||
|
public async loadFromUri(uri: string | undefined) {
|
||||||
|
if (uri && typeof uri !== 'string') {
|
||||||
|
throw new Error(`${this._name}.loadFromUri - expected model uri`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const weightMap = await loadWeightMap(uri, this.getDefaultModelName())
|
||||||
|
this.loadFromWeightMap(weightMap)
|
||||||
|
}
|
||||||
|
|
||||||
|
public async loadFromDisk(filePath: string | undefined) {
|
||||||
|
if (filePath && typeof filePath !== 'string') {
|
||||||
|
throw new Error(`${this._name}.loadFromDisk - expected model file path`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const { readFile } = env.getEnv()
|
||||||
|
|
||||||
|
const { manifestUri, modelBaseUri } = getModelUris(filePath, this.getDefaultModelName())
|
||||||
|
|
||||||
|
const fetchWeightsFromDisk = (filePaths: string[]) => Promise.all(
|
||||||
|
filePaths.map(filePath => readFile(filePath).then(buf => buf.buffer))
|
||||||
|
)
|
||||||
|
const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk)
|
||||||
|
|
||||||
|
const manifest = JSON.parse((await readFile(manifestUri)).toString())
|
||||||
|
const weightMap = await loadWeights(manifest, modelBaseUri)
|
||||||
|
|
||||||
|
this.loadFromWeightMap(weightMap)
|
||||||
|
}
|
||||||
|
|
||||||
|
public loadFromWeightMap(weightMap: tf.NamedTensorMap) {
|
||||||
|
const {
|
||||||
|
paramMappings,
|
||||||
|
params
|
||||||
|
} = this.extractParamsFromWeigthMap(weightMap)
|
||||||
|
|
||||||
|
this._paramMappings = paramMappings
|
||||||
|
this._params = params
|
||||||
|
}
|
||||||
|
|
||||||
|
public extractWeights(weights: Float32Array) {
|
||||||
|
const {
|
||||||
|
paramMappings,
|
||||||
|
params
|
||||||
|
} = this.extractParams(weights)
|
||||||
|
|
||||||
|
this._paramMappings = paramMappings
|
||||||
|
this._params = params
|
||||||
|
}
|
||||||
|
|
||||||
|
private traversePropertyPath(paramPath: string) {
|
||||||
|
if (!this.params) {
|
||||||
|
throw new Error(`traversePropertyPath - model has no loaded params`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = paramPath.split('/').reduce((res: { nextObj: any, obj?: any, objProp?: string }, objProp) => {
|
||||||
|
if (!res.nextObj.hasOwnProperty(objProp)) {
|
||||||
|
throw new Error(`traversePropertyPath - object does not have property ${objProp}, for path ${paramPath}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return { obj: res.nextObj, objProp, nextObj: res.nextObj[objProp] }
|
||||||
|
}, { nextObj: this.params })
|
||||||
|
|
||||||
|
const { obj, objProp } = result
|
||||||
|
if (!obj || !objProp || !(obj[objProp] instanceof tf.Tensor)) {
|
||||||
|
throw new Error(`traversePropertyPath - parameter is not a tensor, for path ${paramPath}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return { obj, objProp }
|
||||||
|
}
|
||||||
|
|
||||||
|
protected abstract getDefaultModelName(): string
|
||||||
|
protected abstract extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap): { params: TNetParams, paramMappings: ParamMapping[] }
|
||||||
|
protected abstract extractParams(weights: Float32Array): { params: TNetParams, paramMappings: ParamMapping[] }
|
||||||
|
}
|
|
@ -0,0 +1,126 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { fullyConnectedLayer } from '../common/fullyConnectedLayer';
|
||||||
|
import { seperateWeightMaps } from '../faceProcessor/util';
|
||||||
|
import { TinyXception } from '../xception/TinyXception';
|
||||||
|
import { extractParams } from './extractParams';
|
||||||
|
import { extractParamsFromWeigthMap } from './extractParamsFromWeigthMap';
|
||||||
|
import { AgeAndGenderPrediction, Gender, NetOutput, NetParams } from './types';
|
||||||
|
import { NeuralNetwork } from '../NeuralNetwork';
|
||||||
|
import { NetInput, TNetInput, toNetInput } from '../dom';
|
||||||
|
|
||||||
|
export class AgeGenderNet extends NeuralNetwork<NetParams> {

  // Backbone CNN producing the shared face feature map for both heads.
  private _faceFeatureExtractor: TinyXception

  constructor(faceFeatureExtractor: TinyXception = new TinyXception(2)) {
    super('AgeGenderNet')
    this._faceFeatureExtractor = faceFeatureExtractor
  }

  public get faceFeatureExtractor(): TinyXception {
    return this._faceFeatureExtractor
  }

  /**
   * Runs the classifier heads on top of the feature extractor.
   * Returns raw outputs: age as a 1D tensor (one value per batch element)
   * and gender as unnormalized per-class scores (no softmax yet).
   */
  public runNet(input: NetInput | tf.Tensor4D): NetOutput {

    const { params } = this

    if (!params) {
      throw new Error(`${this._name} - load model before inference`)
    }

    return tf.tidy(() => {
      // a raw tensor input is assumed to already be the bottleneck feature map
      const bottleneckFeatures = input instanceof NetInput
        ? this.faceFeatureExtractor.forwardInput(input)
        : input

      // 7x7 average pooling with stride 2, then flatten per batch element
      const pooled = tf.avgPool(bottleneckFeatures, [7, 7], [2, 2], 'valid').as2D(bottleneckFeatures.shape[0], -1)
      const age = fullyConnectedLayer(pooled, params.fc.age).as1D()
      const gender = fullyConnectedLayer(pooled, params.fc.gender)
      return { age, gender }
    })
  }

  /** Like runNet, but applies softmax to the gender scores. */
  public forwardInput(input: NetInput | tf.Tensor4D): NetOutput {
    return tf.tidy(() => {
      const { age, gender } = this.runNet(input)
      return { age, gender: tf.softmax(gender) }
    })
  }

  public async forward(input: TNetInput): Promise<NetOutput> {
    return this.forwardInput(await toNetInput(input))
  }

  /**
   * Predicts age and gender for the given input.
   * Returns a single prediction for single inputs and an array for batch
   * inputs (see the final isBatchInput check).
   */
  public async predictAgeAndGender(input: TNetInput): Promise<AgeAndGenderPrediction | AgeAndGenderPrediction[]> {
    const netInput = await toNetInput(input)
    const out = await this.forwardInput(netInput)

    // split the batched outputs into per-sample tensors
    const ages = tf.unstack(out.age)
    const genders = tf.unstack(out.gender)
    const ageAndGenderTensors = ages.map((ageTensor, i) => ({
      ageTensor,
      genderTensor: genders[i]
    }))

    const predictionsByBatch = await Promise.all(
      ageAndGenderTensors.map(async ({ ageTensor, genderTensor }) => {
        const age = (await ageTensor.data())[0]
        // index 0 of the softmaxed gender output is treated as P(male)
        const probMale = (await genderTensor.data())[0]
        const isMale = probMale > 0.5
        const gender = isMale ? Gender.MALE : Gender.FEMALE
        const genderProbability = isMale ? probMale : (1 - probMale)

        // dispose each per-sample tensor as soon as its value has been read
        ageTensor.dispose()
        genderTensor.dispose()
        return { age, gender, genderProbability }
      })
    )
    // the batched output tensors are no longer needed once unstacked values are read
    out.age.dispose()
    out.gender.dispose()

    return netInput.isBatchInput
      ? predictionsByBatch
      : predictionsByBatch[0]
  }

  protected getDefaultModelName(): string {
    return 'age_gender_model'
  }

  public dispose(throwOnRedispose: boolean = true) {
    // dispose the backbone first, then this net's own classifier params
    this.faceFeatureExtractor.dispose(throwOnRedispose)
    super.dispose(throwOnRedispose)
  }

  /** Loads only the classifier-head weights (not the feature extractor). */
  public loadClassifierParams(weights: Float32Array) {
    const { params, paramMappings } = this.extractClassifierParams(weights)
    this._params = params
    this._paramMappings = paramMappings
  }

  public extractClassifierParams(weights: Float32Array) {
    return extractParams(weights)
  }

  protected extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap) {
    // split the combined weight map into backbone and classifier entries
    const { featureExtractorMap, classifierMap } = seperateWeightMaps(weightMap)

    this.faceFeatureExtractor.loadFromWeightMap(featureExtractorMap)

    return extractParamsFromWeigthMap(classifierMap)
  }

  protected extractParams(weights: Float32Array) {
    // classifier head size: fc/age (512x1 weights + 1 bias) + fc/gender (512x2 weights + 2 bias)
    const classifierWeightSize = (512 * 1 + 1) + (512 * 2 + 2)

    // backbone weights come first, the classifier head occupies the tail
    const featureExtractorWeights = weights.slice(0, weights.length - classifierWeightSize)
    const classifierWeights = weights.slice(weights.length - classifierWeightSize)

    this.faceFeatureExtractor.extractWeights(featureExtractorWeights)
    return this.extractClassifierParams(classifierWeights)
  }
}
|
|
@ -0,0 +1,26 @@
|
||||||
|
import { extractFCParamsFactory, extractWeightsFactory, ParamMapping } from '../common';
|
||||||
|
import { NetParams } from './types';
|
||||||
|
|
||||||
|
export function extractParams(weights: Float32Array): { params: NetParams, paramMappings: ParamMapping[] } {
|
||||||
|
|
||||||
|
const paramMappings: ParamMapping[] = []
|
||||||
|
|
||||||
|
const {
|
||||||
|
extractWeights,
|
||||||
|
getRemainingWeights
|
||||||
|
} = extractWeightsFactory(weights)
|
||||||
|
|
||||||
|
const extractFCParams = extractFCParamsFactory(extractWeights, paramMappings)
|
||||||
|
|
||||||
|
const age = extractFCParams(512, 1, 'fc/age')
|
||||||
|
const gender = extractFCParams(512, 2, 'fc/gender')
|
||||||
|
|
||||||
|
if (getRemainingWeights().length !== 0) {
|
||||||
|
throw new Error(`weights remaing after extract: ${getRemainingWeights().length}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
paramMappings,
|
||||||
|
params: { fc: { age, gender } }
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,30 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { disposeUnusedWeightTensors, extractWeightEntryFactory, FCParams, ParamMapping } from '../common';
|
||||||
|
import { NetParams } from './types';
|
||||||
|
|
||||||
|
export function extractParamsFromWeigthMap(
|
||||||
|
weightMap: tf.NamedTensorMap
|
||||||
|
): { params: NetParams, paramMappings: ParamMapping[] } {
|
||||||
|
|
||||||
|
const paramMappings: ParamMapping[] = []
|
||||||
|
|
||||||
|
const extractWeightEntry = extractWeightEntryFactory(weightMap, paramMappings)
|
||||||
|
|
||||||
|
function extractFcParams(prefix: string): FCParams {
|
||||||
|
const weights = extractWeightEntry<tf.Tensor2D>(`${prefix}/weights`, 2)
|
||||||
|
const bias = extractWeightEntry<tf.Tensor1D>(`${prefix}/bias`, 1)
|
||||||
|
return { weights, bias }
|
||||||
|
}
|
||||||
|
|
||||||
|
const params = {
|
||||||
|
fc: {
|
||||||
|
age: extractFcParams('fc/age'),
|
||||||
|
gender: extractFcParams('fc/gender')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
disposeUnusedWeightTensors(weightMap, paramMappings)
|
||||||
|
|
||||||
|
return { params, paramMappings }
|
||||||
|
}
|
|
@ -0,0 +1,2 @@
|
||||||
|
export * from './AgeGenderNet';
|
||||||
|
export * from './types';
|
|
@ -0,0 +1,23 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { FCParams } from '../common';
|
||||||
|
|
||||||
|
// Final prediction for a single face, as returned by
// AgeGenderNet.predictAgeAndGender.
export type AgeAndGenderPrediction = {
  age: number
  gender: Gender
  // probability of the predicted gender (always the winning class, so >= 0.5)
  genderProbability: number
}

export enum Gender {
  FEMALE = 'female',
  MALE = 'male'
}

// Raw net outputs: per-batch-element age regression and 2-class gender scores.
export type NetOutput = { age: tf.Tensor1D, gender: tf.Tensor2D }

// Parameters of the age/gender classifier head (two fully connected layers).
export type NetParams = {
  fc: {
    age: FCParams
    gender: FCParams
  }
}
|
|
@ -0,0 +1,14 @@
|
||||||
|
import { Box } from './Box';
|
||||||
|
|
||||||
|
// Axis-aligned box described by its four edge coordinates.
export interface IBoundingBox {
  left: number
  top: number
  right: number
  bottom: number
}

// Box specialization constructed from edge coordinates rather than
// x/y/width/height. Note: unlike Box's constructor default, negative
// dimensions are disallowed here unless explicitly enabled.
export class BoundingBox extends Box<BoundingBox> implements IBoundingBox {
  constructor(left: number, top: number, right: number, bottom: number, allowNegativeDimensions: boolean = false) {
    super({ left, top, right, bottom }, allowNegativeDimensions)
  }
}
|
|
@ -0,0 +1,175 @@
|
||||||
|
import { isDimensions, isValidNumber } from '../utils';
|
||||||
|
import { IBoundingBox } from './BoundingBox';
|
||||||
|
import { IDimensions } from './Dimensions';
|
||||||
|
import { Point } from './Point';
|
||||||
|
import { IRect } from './Rect';
|
||||||
|
|
||||||
|
export class Box<BoxType = any> implements IBoundingBox, IRect {
|
||||||
|
|
||||||
|
/**
 * True if rect is a truthy object whose x, y, width and height are all
 * valid numbers.
 */
public static isRect(rect: any): boolean {
  if (!rect) {
    return false
  }
  return [rect.x, rect.y, rect.width, rect.height].every(isValidNumber)
}
|
||||||
|
|
||||||
|
/**
 * Throws if box is not a valid rect, or if it has negative width/height
 * while allowNegativeDimensions is false.
 */
public static assertIsValidBox(box: any, callee: string, allowNegativeDimensions: boolean = false) {
  if (!Box.isRect(box)) {
    throw new Error(`${callee} - invalid box: ${JSON.stringify(box)}, expected object with properties x, y, width, height`)
  }

  const hasNegativeDimensions = box.width < 0 || box.height < 0
  if (hasNegativeDimensions && !allowNegativeDimensions) {
    throw new Error(`${callee} - width (${box.width}) and height (${box.height}) must be positive numbers`)
  }
}
|
||||||
|
|
||||||
|
// Backing fields for the read-only x/y/width/height getters; set once in
// the constructor after validation.
private _x: number
private _y: number
private _width: number
private _height: number
|
||||||
|
|
||||||
|
/**
 * Accepts either an IRect ({ x, y, width, height }) or an IBoundingBox
 * ({ left, top, right, bottom }) and normalizes it to x/y/width/height.
 * Rect properties win when both shapes are present.
 */
constructor(_box: IBoundingBox | IRect, allowNegativeDimensions: boolean = true) {
  const box = (_box || {}) as any

  const hasBboxProps = [box.left, box.top, box.right, box.bottom].every(isValidNumber)
  const hasRectProps = [box.x, box.y, box.width, box.height].every(isValidNumber)

  if (!hasRectProps && !hasBboxProps) {
    throw new Error(`Box.constructor - expected box to be IBoundingBox | IRect, instead have ${JSON.stringify(box)}`)
  }

  let x: number, y: number, width: number, height: number
  if (hasRectProps) {
    x = box.x
    y = box.y
    width = box.width
    height = box.height
  } else {
    x = box.left
    y = box.top
    width = box.right - box.left
    height = box.bottom - box.top
  }

  Box.assertIsValidBox({ x, y, width, height }, 'Box.constructor', allowNegativeDimensions)

  this._x = x
  this._y = y
  this._width = width
  this._height = height
}
|
||||||
|
|
||||||
|
public get x(): number { return this._x }
public get y(): number { return this._y }
public get width(): number { return this._width }
public get height(): number { return this._height }
// Edge coordinates derived from position and size.
public get left(): number { return this.x }
public get top(): number { return this.y }
public get right(): number { return this.x + this.width }
public get bottom(): number { return this.y + this.height }
public get area(): number { return this.width * this.height }
// Corner points.
public get topLeft(): Point { return new Point(this.left, this.top) }
public get topRight(): Point { return new Point(this.right, this.top) }
public get bottomLeft(): Point { return new Point(this.left, this.bottom) }
public get bottomRight(): Point { return new Point(this.right, this.bottom) }
|
||||||
|
|
||||||
|
public round(): Box<BoxType> {
|
||||||
|
const [x, y, width, height] = [this.x, this.y, this.width, this.height]
|
||||||
|
.map(val => Math.round(val))
|
||||||
|
return new Box({ x, y, width, height })
|
||||||
|
}
|
||||||
|
|
||||||
|
public floor(): Box<BoxType> {
|
||||||
|
const [x, y, width, height] = [this.x, this.y, this.width, this.height]
|
||||||
|
.map(val => Math.floor(val))
|
||||||
|
return new Box({ x, y, width, height })
|
||||||
|
}
|
||||||
|
|
||||||
|
public toSquare(): Box<BoxType> {
|
||||||
|
let { x, y, width, height } = this
|
||||||
|
const diff = Math.abs(width - height)
|
||||||
|
if (width < height) {
|
||||||
|
x -= (diff / 2)
|
||||||
|
width += diff
|
||||||
|
}
|
||||||
|
if (height < width) {
|
||||||
|
y -= (diff / 2)
|
||||||
|
height += diff
|
||||||
|
}
|
||||||
|
|
||||||
|
return new Box({ x, y, width, height })
|
||||||
|
}
|
||||||
|
|
||||||
|
public rescale(s: IDimensions | number): Box<BoxType> {
|
||||||
|
const scaleX = isDimensions(s) ? (s as IDimensions).width : s as number
|
||||||
|
const scaleY = isDimensions(s) ? (s as IDimensions).height : s as number
|
||||||
|
return new Box({
|
||||||
|
x: this.x * scaleX,
|
||||||
|
y: this.y * scaleY,
|
||||||
|
width: this.width * scaleX,
|
||||||
|
height: this.height * scaleY
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
public pad(padX: number, padY: number): Box<BoxType> {
|
||||||
|
let [x, y, width, height] = [
|
||||||
|
this.x - (padX / 2),
|
||||||
|
this.y - (padY / 2),
|
||||||
|
this.width + padX,
|
||||||
|
this.height + padY
|
||||||
|
]
|
||||||
|
return new Box({ x, y, width, height })
|
||||||
|
}
|
||||||
|
|
||||||
|
public clipAtImageBorders(imgWidth: number, imgHeight: number): Box<BoxType> {
|
||||||
|
const { x, y, right, bottom } = this
|
||||||
|
const clippedX = Math.max(x, 0)
|
||||||
|
const clippedY = Math.max(y, 0)
|
||||||
|
|
||||||
|
const newWidth = right - clippedX
|
||||||
|
const newHeight = bottom - clippedY
|
||||||
|
const clippedWidth = Math.min(newWidth, imgWidth - clippedX)
|
||||||
|
const clippedHeight = Math.min(newHeight, imgHeight - clippedY)
|
||||||
|
|
||||||
|
return (new Box({ x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight})).floor()
|
||||||
|
}
|
||||||
|
|
||||||
|
public shift(sx: number, sy: number): Box<BoxType> {
|
||||||
|
const { width, height } = this
|
||||||
|
const x = this.x + sx
|
||||||
|
const y = this.y + sy
|
||||||
|
|
||||||
|
return new Box({ x, y, width, height })
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Computes source (x..ex, y..ey) and destination (dx..edx, dy..edy) ranges
 * for cropping this box out of an image while padding at the image borders.
 * Coordinates are 1-based, mirroring the reference MTCNN `pad` routine.
 */
public padAtBorders(imageHeight: number, imageWidth: number) {
  const w = this.width + 1
  const h = this.height + 1

  // destination range defaults: the full w x h target area
  let dx = 1
  let dy = 1
  let edx = w
  let edy = h

  // source range defaults: the box itself
  let x = this.left
  let y = this.top
  let ex = this.right
  let ey = this.bottom

  // box extends past the right/bottom border: shrink the destination end
  if (ex > imageWidth) {
    edx = -ex + imageWidth + w
    ex = imageWidth
  }
  if (ey > imageHeight) {
    edy = -ey + imageHeight + h
    ey = imageHeight
  }
  // box extends past the left/top border: shift the destination start.
  // bugfix: both branches previously assigned `edy`; per the MTCNN reference
  // pad implementation, x < 1 must set `dx` and y < 1 must set `dy`.
  if (x < 1) {
    dx = 2 - x
    x = 1
  }
  if (y < 1) {
    dy = 2 - y
    y = 1
  }

  return { dy, edy, dx, edx, y, ey, x, ex, w, h }
}
|
||||||
|
|
||||||
|
// Refines the box by offsetting each edge with the region's edge values,
// scaled by this box's size, then squares and rounds the result.
public calibrate(region: Box) {
  const refined = new Box({
    left: this.left + region.left * this.width,
    top: this.top + region.top * this.height,
    right: this.right + region.right * this.width,
    bottom: this.bottom + region.bottom * this.height
  })
  return refined.toSquare().round()
}
|
||||||
|
}
|
|
@ -0,0 +1,28 @@
|
||||||
|
import { isValidNumber } from '../utils';
|
||||||
|
|
||||||
|
export interface IDimensions {
|
||||||
|
width: number
|
||||||
|
height: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Dimensions implements IDimensions {
|
||||||
|
|
||||||
|
private _width: number
|
||||||
|
private _height: number
|
||||||
|
|
||||||
|
constructor(width: number, height: number) {
|
||||||
|
if (!isValidNumber(width) || !isValidNumber(height)) {
|
||||||
|
throw new Error(`Dimensions.constructor - expected width and height to be valid numbers, instead have ${JSON.stringify({ width, height })}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
this._width = width
|
||||||
|
this._height = height
|
||||||
|
}
|
||||||
|
|
||||||
|
public get width(): number { return this._width }
|
||||||
|
public get height(): number { return this._height }
|
||||||
|
|
||||||
|
public reverse(): Dimensions {
|
||||||
|
return new Dimensions(1 / this.width, 1 / this.height)
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,24 @@
|
||||||
|
import { Box } from './Box';
|
||||||
|
import { IDimensions } from './Dimensions';
|
||||||
|
import { ObjectDetection } from './ObjectDetection';
|
||||||
|
import { Rect } from './Rect';
|
||||||
|
|
||||||
|
export interface IFaceDetecion {
|
||||||
|
score: number
|
||||||
|
box: Box
|
||||||
|
}
|
||||||
|
|
||||||
|
export class FaceDetection extends ObjectDetection implements IFaceDetecion {
|
||||||
|
constructor(
|
||||||
|
score: number,
|
||||||
|
relativeBox: Rect,
|
||||||
|
imageDims: IDimensions
|
||||||
|
) {
|
||||||
|
super(score, score, '', relativeBox, imageDims)
|
||||||
|
}
|
||||||
|
|
||||||
|
public forSize(width: number, height: number): FaceDetection {
|
||||||
|
const { score, relativeBox, imageDims } = super.forSize(width, height)
|
||||||
|
return new FaceDetection(score, relativeBox, imageDims)
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,125 @@
|
||||||
|
import { minBbox } from '../ops';
|
||||||
|
import { getCenterPoint } from '../utils';
|
||||||
|
import { IBoundingBox } from './BoundingBox';
|
||||||
|
import { Box } from './Box';
|
||||||
|
import { Dimensions, IDimensions } from './Dimensions';
|
||||||
|
import { FaceDetection } from './FaceDetection';
|
||||||
|
import { Point } from './Point';
|
||||||
|
import { IRect, Rect } from './Rect';
|
||||||
|
|
||||||
|
// face alignment constants
// relX/relY position the reference point within the aligned crop;
// relScale relates the eye-to-mouth distance to the crop size (dlib-style).
const relX = 0.5
const relY = 0.43
const relScale = 0.45

export interface IFaceLandmarks {
  positions: Point[]
  shift: Point
}

/**
 * A set of facial landmark points detected on an image. Positions are stored
 * in absolute (pixel) coordinates: the relative positions scaled by the image
 * dimensions plus an optional shift.
 */
export class FaceLandmarks implements IFaceLandmarks {
  protected _shift: Point
  protected _positions: Point[]
  protected _imgDims: Dimensions

  constructor(
    relativeFaceLandmarkPositions: Point[],
    imgDims: IDimensions,
    shift: Point = new Point(0, 0)
  ) {
    const { width, height } = imgDims
    this._imgDims = new Dimensions(width, height)
    this._shift = shift
    // scale relative coordinates to pixels, then apply the shift
    this._positions = relativeFaceLandmarkPositions.map(
      pt => pt.mul(new Point(width, height)).add(shift)
    )
  }

  // defensive copy so callers cannot mutate the internal shift
  public get shift(): Point { return new Point(this._shift.x, this._shift.y) }
  public get imageWidth(): number { return this._imgDims.width }
  public get imageHeight(): number { return this._imgDims.height }
  public get positions(): Point[] { return this._positions }
  // positions mapped back to relative (0..1) coordinates, shift removed
  public get relativePositions(): Point[] {
    return this._positions.map(
      pt => pt.sub(this._shift).div(new Point(this.imageWidth, this.imageHeight))
    )
  }

  // Re-projects the landmarks onto an image of the given size. Uses the
  // dynamic constructor so subclasses keep their concrete type.
  public forSize<T extends FaceLandmarks>(width: number, height: number): T {
    return new (this.constructor as any)(
      this.relativePositions,
      { width, height }
    )
  }

  // Returns a copy whose positions are offset by (x, y).
  public shiftBy<T extends FaceLandmarks>(x: number, y: number): T {
    return new (this.constructor as any)(
      this.relativePositions,
      this._imgDims,
      new Point(x, y)
    )
  }

  public shiftByPoint<T extends FaceLandmarks>(pt: Point): T {
    return this.shiftBy(pt.x, pt.y)
  }

  /**
   * Aligns the face landmarks after face detection from the relative positions of the faces
   * bounding box, or it's current shift. This function should be used to align the face images
   * after face detection has been performed, before they are passed to the face recognition net.
   * This will make the computed face descriptor more accurate.
   *
   * @param detection (optional) The bounding box of the face or the face detection result. If
   * no argument was passed the position of the face landmarks are assumed to be relative to
   * it's current shift.
   * @returns The bounding box of the aligned face.
   */
  public align(
    detection?: FaceDetection | IRect | IBoundingBox | null,
    options: { useDlibAlignment?: boolean, minBoxPadding?: number } = { }
  ): Box {
    // with a detection: shift into the box's coordinate frame, then recurse
    // once with detection = null to perform the actual alignment
    if (detection) {
      const box = detection instanceof FaceDetection
        ? detection.box.floor()
        : new Box(detection)

      return this.shiftBy(box.x, box.y).align(null, options)
    }

    const { useDlibAlignment, minBoxPadding } = Object.assign({}, { useDlibAlignment: false, minBoxPadding: 0.2 }, options)

    if (useDlibAlignment) {
      return this.alignDlib()
    }

    return this.alignMinBbox(minBoxPadding)
  }

  // dlib-style alignment: crop size derived from the eye-to-mouth distance,
  // positioned so the landmark center sits at (relX, relY) of the crop.
  private alignDlib(): Box {

    const centers = this.getRefPointsForAlignment()

    const [leftEyeCenter, rightEyeCenter, mouthCenter] = centers
    const distToMouth = (pt: Point) => mouthCenter.sub(pt).magnitude()
    const eyeToMouthDist = (distToMouth(leftEyeCenter) + distToMouth(rightEyeCenter)) / 2

    const size = Math.floor(eyeToMouthDist / relScale)

    const refPoint = getCenterPoint(centers)
    // TODO: pad in case rectangle is out of image bounds
    const x = Math.floor(Math.max(0, refPoint.x - (relX * size)))
    const y = Math.floor(Math.max(0, refPoint.y - (relY * size)))

    // NOTE(review): `imageWidth + x` grows the clamp with x, so the rect can
    // still extend past the right/bottom border — looks like `imageWidth - x`
    // was intended; confirm against upstream before changing.
    return new Rect(x, y, Math.min(size, this.imageWidth + x), Math.min(size, this.imageHeight + y))
  }

  // minimal bounding box of all landmarks, padded by the given fraction
  private alignMinBbox(padding: number): Box {
    const box = minBbox(this.positions)
    return box.pad(box.width * padding, box.height * padding)
  }

  // subclasses must supply [leftEyeCenter, rightEyeCenter, mouthCenter]
  protected getRefPointsForAlignment(): Point[] {
    throw new Error('getRefPointsForAlignment not implemented by base class')
  }
}
|
|
@ -0,0 +1,16 @@
|
||||||
|
import { getCenterPoint } from '../utils';
|
||||||
|
import { FaceLandmarks } from './FaceLandmarks';
|
||||||
|
import { Point } from './Point';
|
||||||
|
|
||||||
|
|
||||||
|
export class FaceLandmarks5 extends FaceLandmarks {
|
||||||
|
|
||||||
|
protected getRefPointsForAlignment(): Point[] {
|
||||||
|
const pts = this.positions
|
||||||
|
return [
|
||||||
|
pts[0],
|
||||||
|
pts[1],
|
||||||
|
getCenterPoint([pts[3], pts[4]])
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,41 @@
|
||||||
|
import { getCenterPoint } from '../utils';
|
||||||
|
import { FaceLandmarks } from './FaceLandmarks';
|
||||||
|
import { Point } from './Point';
|
||||||
|
|
||||||
|
export class FaceLandmarks68 extends FaceLandmarks {
|
||||||
|
public getJawOutline(): Point[] {
|
||||||
|
return this.positions.slice(0, 17)
|
||||||
|
}
|
||||||
|
|
||||||
|
public getLeftEyeBrow(): Point[] {
|
||||||
|
return this.positions.slice(17, 22)
|
||||||
|
}
|
||||||
|
|
||||||
|
public getRightEyeBrow(): Point[] {
|
||||||
|
return this.positions.slice(22, 27)
|
||||||
|
}
|
||||||
|
|
||||||
|
public getNose(): Point[] {
|
||||||
|
return this.positions.slice(27, 36)
|
||||||
|
}
|
||||||
|
|
||||||
|
public getLeftEye(): Point[] {
|
||||||
|
return this.positions.slice(36, 42)
|
||||||
|
}
|
||||||
|
|
||||||
|
public getRightEye(): Point[] {
|
||||||
|
return this.positions.slice(42, 48)
|
||||||
|
}
|
||||||
|
|
||||||
|
public getMouth(): Point[] {
|
||||||
|
return this.positions.slice(48, 68)
|
||||||
|
}
|
||||||
|
|
||||||
|
protected getRefPointsForAlignment(): Point[] {
|
||||||
|
return [
|
||||||
|
this.getLeftEye(),
|
||||||
|
this.getRightEye(),
|
||||||
|
this.getMouth()
|
||||||
|
].map(getCenterPoint)
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,23 @@
|
||||||
|
import { round } from '../utils';
|
||||||
|
|
||||||
|
export interface IFaceMatch {
|
||||||
|
label: string
|
||||||
|
distance: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export class FaceMatch implements IFaceMatch {
|
||||||
|
private _label: string
|
||||||
|
private _distance: number
|
||||||
|
|
||||||
|
constructor(label: string, distance: number) {
|
||||||
|
this._label = label
|
||||||
|
this._distance = distance
|
||||||
|
}
|
||||||
|
|
||||||
|
public get label(): string { return this._label }
|
||||||
|
public get distance(): number { return this._distance }
|
||||||
|
|
||||||
|
public toString(withDistance: boolean = true): string {
|
||||||
|
return `${this.label}${withDistance ? ` (${round(this.distance)})` : ''}`
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,25 @@
|
||||||
|
import { isValidNumber } from '../utils';
|
||||||
|
import { IBoundingBox } from './BoundingBox';
|
||||||
|
import { Box } from './Box';
|
||||||
|
import { IRect } from './Rect';
|
||||||
|
|
||||||
|
export class LabeledBox extends Box<LabeledBox> {
|
||||||
|
|
||||||
|
public static assertIsValidLabeledBox(box: any, callee: string) {
|
||||||
|
Box.assertIsValidBox(box, callee)
|
||||||
|
|
||||||
|
if (!isValidNumber(box.label)) {
|
||||||
|
throw new Error(`${callee} - expected property label (${box.label}) to be a number`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private _label: number
|
||||||
|
|
||||||
|
constructor(box: IBoundingBox | IRect | any, label: number) {
|
||||||
|
super(box)
|
||||||
|
this._label = label
|
||||||
|
}
|
||||||
|
|
||||||
|
public get label(): number { return this._label }
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,35 @@
|
||||||
|
export class LabeledFaceDescriptors {
|
||||||
|
private _label: string
|
||||||
|
private _descriptors: Float32Array[]
|
||||||
|
|
||||||
|
constructor(label: string, descriptors: Float32Array[]) {
|
||||||
|
if (!(typeof label === 'string')) {
|
||||||
|
throw new Error('LabeledFaceDescriptors - constructor expected label to be a string')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!Array.isArray(descriptors) || descriptors.some(desc => !(desc instanceof Float32Array))) {
|
||||||
|
throw new Error('LabeledFaceDescriptors - constructor expected descriptors to be an array of Float32Array')
|
||||||
|
}
|
||||||
|
|
||||||
|
this._label = label
|
||||||
|
this._descriptors = descriptors
|
||||||
|
}
|
||||||
|
|
||||||
|
public get label(): string { return this._label }
|
||||||
|
public get descriptors(): Float32Array[] { return this._descriptors }
|
||||||
|
|
||||||
|
public toJSON(): any {
|
||||||
|
return {
|
||||||
|
label: this.label,
|
||||||
|
descriptors: this.descriptors.map((d) => Array.from(d))
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
public static fromJSON(json: any): LabeledFaceDescriptors {
|
||||||
|
const descriptors = json.descriptors.map((d: any) => {
|
||||||
|
return new Float32Array(d);
|
||||||
|
});
|
||||||
|
return new LabeledFaceDescriptors(json.label, descriptors);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,44 @@
|
||||||
|
import { Box } from './Box';
|
||||||
|
import { Dimensions, IDimensions } from './Dimensions';
|
||||||
|
import { IRect, Rect } from './Rect';
|
||||||
|
|
||||||
|
export class ObjectDetection {
|
||||||
|
private _score: number
|
||||||
|
private _classScore: number
|
||||||
|
private _className: string
|
||||||
|
private _box: Rect
|
||||||
|
private _imageDims: Dimensions
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
score: number,
|
||||||
|
classScore: number,
|
||||||
|
className: string,
|
||||||
|
relativeBox: IRect,
|
||||||
|
imageDims: IDimensions
|
||||||
|
) {
|
||||||
|
this._imageDims = new Dimensions(imageDims.width, imageDims.height)
|
||||||
|
this._score = score
|
||||||
|
this._classScore = classScore
|
||||||
|
this._className = className
|
||||||
|
this._box = new Box(relativeBox).rescale(this._imageDims)
|
||||||
|
}
|
||||||
|
|
||||||
|
public get score(): number { return this._score }
|
||||||
|
public get classScore(): number { return this._classScore }
|
||||||
|
public get className(): string { return this._className }
|
||||||
|
public get box(): Box { return this._box }
|
||||||
|
public get imageDims(): Dimensions { return this._imageDims }
|
||||||
|
public get imageWidth(): number { return this.imageDims.width }
|
||||||
|
public get imageHeight(): number { return this.imageDims.height }
|
||||||
|
public get relativeBox(): Box { return new Box(this._box).rescale(this.imageDims.reverse()) }
|
||||||
|
|
||||||
|
public forSize(width: number, height: number): ObjectDetection {
|
||||||
|
return new ObjectDetection(
|
||||||
|
this.score,
|
||||||
|
this.classScore,
|
||||||
|
this.className,
|
||||||
|
this.relativeBox,
|
||||||
|
{ width, height}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,45 @@
|
||||||
|
export interface IPoint {
|
||||||
|
x: number
|
||||||
|
y: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Point implements IPoint {
|
||||||
|
private _x: number
|
||||||
|
private _y: number
|
||||||
|
|
||||||
|
constructor(x: number, y: number) {
|
||||||
|
this._x = x
|
||||||
|
this._y = y
|
||||||
|
}
|
||||||
|
|
||||||
|
get x(): number { return this._x }
|
||||||
|
get y(): number { return this._y }
|
||||||
|
|
||||||
|
public add(pt: IPoint): Point {
|
||||||
|
return new Point(this.x + pt.x, this.y + pt.y)
|
||||||
|
}
|
||||||
|
|
||||||
|
public sub(pt: IPoint): Point {
|
||||||
|
return new Point(this.x - pt.x, this.y - pt.y)
|
||||||
|
}
|
||||||
|
|
||||||
|
public mul(pt: IPoint): Point {
|
||||||
|
return new Point(this.x * pt.x, this.y * pt.y)
|
||||||
|
}
|
||||||
|
|
||||||
|
public div(pt: IPoint): Point {
|
||||||
|
return new Point(this.x / pt.x, this.y / pt.y)
|
||||||
|
}
|
||||||
|
|
||||||
|
public abs(): Point {
|
||||||
|
return new Point(Math.abs(this.x), Math.abs(this.y))
|
||||||
|
}
|
||||||
|
|
||||||
|
public magnitude(): number {
|
||||||
|
return Math.sqrt(Math.pow(this.x, 2) + Math.pow(this.y, 2))
|
||||||
|
}
|
||||||
|
|
||||||
|
public floor(): Point {
|
||||||
|
return new Point(Math.floor(this.x), Math.floor(this.y))
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,31 @@
|
||||||
|
import { isValidProbablitiy } from '../utils';
|
||||||
|
import { IBoundingBox } from './BoundingBox';
|
||||||
|
import { LabeledBox } from './LabeledBox';
|
||||||
|
import { IRect } from './Rect';
|
||||||
|
|
||||||
|
export class PredictedBox extends LabeledBox {
|
||||||
|
|
||||||
|
public static assertIsValidPredictedBox(box: any, callee: string) {
|
||||||
|
LabeledBox.assertIsValidLabeledBox(box, callee)
|
||||||
|
|
||||||
|
if (
|
||||||
|
!isValidProbablitiy(box.score)
|
||||||
|
|| !isValidProbablitiy(box.classScore)
|
||||||
|
) {
|
||||||
|
throw new Error(`${callee} - expected properties score (${box.score}) and (${box.classScore}) to be a number between [0, 1]`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private _score: number
|
||||||
|
private _classScore: number
|
||||||
|
|
||||||
|
constructor(box: IBoundingBox | IRect | any, label: number, score: number, classScore: number) {
|
||||||
|
super(box, label)
|
||||||
|
this._score = score
|
||||||
|
this._classScore = classScore
|
||||||
|
}
|
||||||
|
|
||||||
|
public get score(): number { return this._score }
|
||||||
|
public get classScore(): number { return this._classScore }
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,14 @@
|
||||||
|
import { Box } from './Box';
|
||||||
|
|
||||||
|
export interface IRect {
|
||||||
|
x: number
|
||||||
|
y: number
|
||||||
|
width: number
|
||||||
|
height: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Rect extends Box<Rect> implements IRect {
|
||||||
|
constructor(x: number, y: number, width: number, height: number, allowNegativeDimensions: boolean = false) {
|
||||||
|
super({ x, y, width, height }, allowNegativeDimensions)
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,14 @@
|
||||||
|
export * from './BoundingBox'
|
||||||
|
export * from './Box'
|
||||||
|
export * from './Dimensions'
|
||||||
|
export * from './FaceDetection';
|
||||||
|
export * from './FaceLandmarks';
|
||||||
|
export * from './FaceLandmarks5';
|
||||||
|
export * from './FaceLandmarks68';
|
||||||
|
export * from './FaceMatch';
|
||||||
|
export * from './LabeledBox'
|
||||||
|
export * from './LabeledFaceDescriptors';
|
||||||
|
export * from './ObjectDetection'
|
||||||
|
export * from './Point'
|
||||||
|
export * from './PredictedBox'
|
||||||
|
export * from './Rect'
|
|
@ -0,0 +1,19 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { ConvParams } from './types';
|
||||||
|
|
||||||
|
export function convLayer(
|
||||||
|
x: tf.Tensor4D,
|
||||||
|
params: ConvParams,
|
||||||
|
padding: 'valid' | 'same' = 'same',
|
||||||
|
withRelu: boolean = false
|
||||||
|
): tf.Tensor4D {
|
||||||
|
return tf.tidy(() => {
|
||||||
|
const out = tf.add(
|
||||||
|
tf.conv2d(x, params.filters, [1, 1], padding),
|
||||||
|
params.bias
|
||||||
|
) as tf.Tensor4D
|
||||||
|
|
||||||
|
return withRelu ? tf.relu(out) : out
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,15 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { SeparableConvParams } from './types';
|
||||||
|
|
||||||
|
export function depthwiseSeparableConv(
|
||||||
|
x: tf.Tensor4D,
|
||||||
|
params: SeparableConvParams,
|
||||||
|
stride: [number, number]
|
||||||
|
): tf.Tensor4D {
|
||||||
|
return tf.tidy(() => {
|
||||||
|
let out = tf.separableConv2d(x, params.depthwise_filter, params.pointwise_filter, stride, 'same')
|
||||||
|
out = tf.add(out, params.bias)
|
||||||
|
return out
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,9 @@
|
||||||
|
import { ParamMapping } from './types';
|
||||||
|
|
||||||
|
export function disposeUnusedWeightTensors(weightMap: any, paramMappings: ParamMapping[]) {
|
||||||
|
Object.keys(weightMap).forEach(path => {
|
||||||
|
if (!paramMappings.some(pm => pm.originalPath === path)) {
|
||||||
|
weightMap[path].dispose()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,31 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { ConvParams, ExtractWeightsFunction, ParamMapping } from './types';
|
||||||
|
|
||||||
|
export function extractConvParamsFactory(
|
||||||
|
extractWeights: ExtractWeightsFunction,
|
||||||
|
paramMappings: ParamMapping[]
|
||||||
|
) {
|
||||||
|
|
||||||
|
return function(
|
||||||
|
channelsIn: number,
|
||||||
|
channelsOut: number,
|
||||||
|
filterSize: number,
|
||||||
|
mappedPrefix: string
|
||||||
|
): ConvParams {
|
||||||
|
|
||||||
|
const filters = tf.tensor4d(
|
||||||
|
extractWeights(channelsIn * channelsOut * filterSize * filterSize),
|
||||||
|
[filterSize, filterSize, channelsIn, channelsOut]
|
||||||
|
)
|
||||||
|
const bias = tf.tensor1d(extractWeights(channelsOut))
|
||||||
|
|
||||||
|
paramMappings.push(
|
||||||
|
{ paramPath: `${mappedPrefix}/filters` },
|
||||||
|
{ paramPath: `${mappedPrefix}/bias` }
|
||||||
|
)
|
||||||
|
|
||||||
|
return { filters, bias }
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,31 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { ExtractWeightsFunction, FCParams, ParamMapping } from './types';
|
||||||
|
|
||||||
|
|
||||||
|
export function extractFCParamsFactory(
|
||||||
|
extractWeights: ExtractWeightsFunction,
|
||||||
|
paramMappings: ParamMapping[]
|
||||||
|
) {
|
||||||
|
|
||||||
|
return function(
|
||||||
|
channelsIn: number,
|
||||||
|
channelsOut: number,
|
||||||
|
mappedPrefix: string
|
||||||
|
): FCParams {
|
||||||
|
|
||||||
|
const fc_weights = tf.tensor2d(extractWeights(channelsIn * channelsOut), [channelsIn, channelsOut])
|
||||||
|
const fc_bias = tf.tensor1d(extractWeights(channelsOut))
|
||||||
|
|
||||||
|
paramMappings.push(
|
||||||
|
{ paramPath: `${mappedPrefix}/weights` },
|
||||||
|
{ paramPath: `${mappedPrefix}/bias` }
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
weights: fc_weights,
|
||||||
|
bias: fc_bias
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,46 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { ExtractWeightsFunction, ParamMapping, SeparableConvParams } from './types';
|
||||||
|
|
||||||
|
export function extractSeparableConvParamsFactory(
|
||||||
|
extractWeights: ExtractWeightsFunction,
|
||||||
|
paramMappings: ParamMapping[]
|
||||||
|
) {
|
||||||
|
|
||||||
|
return function(channelsIn: number, channelsOut: number, mappedPrefix: string): SeparableConvParams {
|
||||||
|
const depthwise_filter = tf.tensor4d(extractWeights(3 * 3 * channelsIn), [3, 3, channelsIn, 1])
|
||||||
|
const pointwise_filter = tf.tensor4d(extractWeights(channelsIn * channelsOut), [1, 1, channelsIn, channelsOut])
|
||||||
|
const bias = tf.tensor1d(extractWeights(channelsOut))
|
||||||
|
|
||||||
|
paramMappings.push(
|
||||||
|
{ paramPath: `${mappedPrefix}/depthwise_filter` },
|
||||||
|
{ paramPath: `${mappedPrefix}/pointwise_filter` },
|
||||||
|
{ paramPath: `${mappedPrefix}/bias` }
|
||||||
|
)
|
||||||
|
|
||||||
|
return new SeparableConvParams(
|
||||||
|
depthwise_filter,
|
||||||
|
pointwise_filter,
|
||||||
|
bias
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
export function loadSeparableConvParamsFactory(
|
||||||
|
extractWeightEntry: <T>(originalPath: string, paramRank: number) => T
|
||||||
|
) {
|
||||||
|
|
||||||
|
return function (prefix: string): SeparableConvParams {
|
||||||
|
const depthwise_filter = extractWeightEntry<tf.Tensor4D>(`${prefix}/depthwise_filter`, 4)
|
||||||
|
const pointwise_filter = extractWeightEntry<tf.Tensor4D>(`${prefix}/pointwise_filter`, 4)
|
||||||
|
const bias = extractWeightEntry<tf.Tensor1D>(`${prefix}/bias`, 1)
|
||||||
|
|
||||||
|
return new SeparableConvParams(
|
||||||
|
depthwise_filter,
|
||||||
|
pointwise_filter,
|
||||||
|
bias
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,20 @@
|
||||||
|
import { isTensor } from '../utils';
|
||||||
|
import { ParamMapping } from './types';
|
||||||
|
|
||||||
|
export function extractWeightEntryFactory(weightMap: any, paramMappings: ParamMapping[]) {
|
||||||
|
|
||||||
|
return function<T> (originalPath: string, paramRank: number, mappedPath?: string): T {
|
||||||
|
const tensor = weightMap[originalPath]
|
||||||
|
|
||||||
|
if (!isTensor(tensor, paramRank)) {
|
||||||
|
throw new Error(`expected weightMap[${originalPath}] to be a Tensor${paramRank}D, instead have ${tensor}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
paramMappings.push(
|
||||||
|
{ originalPath, paramPath: mappedPath || originalPath }
|
||||||
|
)
|
||||||
|
|
||||||
|
return tensor
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,18 @@
|
||||||
|
export function extractWeightsFactory(weights: Float32Array) {
|
||||||
|
let remainingWeights = weights
|
||||||
|
|
||||||
|
function extractWeights(numWeights: number): Float32Array {
|
||||||
|
const ret = remainingWeights.slice(0, numWeights)
|
||||||
|
remainingWeights = remainingWeights.slice(numWeights)
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
function getRemainingWeights(): Float32Array {
|
||||||
|
return remainingWeights
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
extractWeights,
|
||||||
|
getRemainingWeights
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,15 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { FCParams } from './types';
|
||||||
|
|
||||||
|
export function fullyConnectedLayer(
|
||||||
|
x: tf.Tensor2D,
|
||||||
|
params: FCParams
|
||||||
|
): tf.Tensor2D {
|
||||||
|
return tf.tidy(() =>
|
||||||
|
tf.add(
|
||||||
|
tf.matMul(x, params.weights),
|
||||||
|
params.bias
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
|
@ -0,0 +1,33 @@
|
||||||
|
export function getModelUris(uri: string | undefined, defaultModelName: string) {
|
||||||
|
const defaultManifestFilename = `${defaultModelName}-weights_manifest.json`
|
||||||
|
|
||||||
|
if (!uri) {
|
||||||
|
return {
|
||||||
|
modelBaseUri: '',
|
||||||
|
manifestUri: defaultManifestFilename
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (uri === '/') {
|
||||||
|
return {
|
||||||
|
modelBaseUri: '/',
|
||||||
|
manifestUri: `/${defaultManifestFilename}`
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const protocol = uri.startsWith('http://') ? 'http://' : uri.startsWith('https://') ? 'https://' : '';
|
||||||
|
uri = uri.replace(protocol, '');
|
||||||
|
|
||||||
|
const parts = uri.split('/').filter(s => s)
|
||||||
|
|
||||||
|
const manifestFile = uri.endsWith('.json')
|
||||||
|
? parts[parts.length - 1]
|
||||||
|
: defaultManifestFilename
|
||||||
|
|
||||||
|
let modelBaseUri = protocol + (uri.endsWith('.json') ? parts.slice(0, parts.length - 1) : parts).join('/')
|
||||||
|
modelBaseUri = uri.startsWith('/') ? `/${modelBaseUri}` : modelBaseUri
|
||||||
|
|
||||||
|
return {
|
||||||
|
modelBaseUri,
|
||||||
|
manifestUri: modelBaseUri === '/' ? `/${manifestFile}` : `${modelBaseUri}/${manifestFile}`
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,10 @@
|
||||||
|
// Barrel file re-exporting the shared layer helpers and weight-extraction
// utilities of the common module.
export * from './convLayer'
export * from './depthwiseSeparableConv'
export * from './disposeUnusedWeightTensors'
export * from './extractConvParamsFactory'
export * from './extractFCParamsFactory'
export * from './extractSeparableConvParamsFactory'
export * from './extractWeightEntryFactory'
export * from './extractWeightsFactory'
export * from './getModelUris'
export * from './types'
|
|
@ -0,0 +1,12 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { ConvParams } from './types';
|
||||||
|
|
||||||
|
export function loadConvParamsFactory(extractWeightEntry: <T>(originalPath: string, paramRank: number) => T) {
|
||||||
|
return function(prefix: string): ConvParams {
|
||||||
|
const filters = extractWeightEntry<tf.Tensor4D>(`${prefix}/filters`, 4)
|
||||||
|
const bias = extractWeightEntry<tf.Tensor1D>(`${prefix}/bias`, 1)
|
||||||
|
|
||||||
|
return { filters, bias }
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,26 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
// Consumes `numWeights` floats from a flat weight array.
export type ExtractWeightsFunction = (numWeights: number) => Float32Array

// Maps a weight's original path in the loaded weight map to its parameter
// path within a net's param tree.
export type ParamMapping = {
  originalPath?: string
  paramPath: string
}

// Parameters of a standard convolution layer.
export type ConvParams = {
  filters: tf.Tensor4D
  bias: tf.Tensor1D
}

// Parameters of a fully-connected layer.
export type FCParams = {
  weights: tf.Tensor2D
  bias: tf.Tensor1D
}

// Parameters of a depthwise-separable convolution layer.
export class SeparableConvParams {
  constructor(
    public depthwise_filter: tf.Tensor4D,
    public pointwise_filter: tf.Tensor4D,
    public bias: tf.Tensor1D
  ) {}
}
|
|
@ -0,0 +1,155 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { Dimensions } from '../classes/Dimensions';
|
||||||
|
import { env } from '../env';
|
||||||
|
import { padToSquare } from '../ops/padToSquare';
|
||||||
|
import { computeReshapedDimensions, isTensor3D, isTensor4D, range } from '../utils';
|
||||||
|
import { createCanvasFromMedia } from './createCanvas';
|
||||||
|
import { imageToSquare } from './imageToSquare';
|
||||||
|
import { TResolvedNetInput } from './types';
|
||||||
|
|
||||||
|
/**
 * Wraps a batch of resolved inputs (image tensors and/or canvases) and
 * converts them into a single batch tensor via toBatchTensor().
 */
export class NetInput {
  // tensors passed in directly; 3D, or 4D with batchSize === 1
  private _imageTensors: Array<tf.Tensor3D | tf.Tensor4D> = []
  // canvases created for (or passed as) non-tensor media inputs
  private _canvases: HTMLCanvasElement[] = []
  private _batchSize: number
  // forces isBatchInput === true even for a single input
  private _treatAsBatchInput: boolean = false

  // per-input [height, width, channels]
  private _inputDimensions: number[][] = []
  // set lazily by toBatchTensor(); undefined until then
  private _inputSize: number

  constructor(
    inputs: Array<TResolvedNetInput>,
    treatAsBatchInput: boolean = false
  ) {
    if (!Array.isArray(inputs)) {
      throw new Error(`NetInput.constructor - expected inputs to be an Array of TResolvedNetInput or to be instanceof tf.Tensor4D, instead have ${inputs}`)
    }

    this._treatAsBatchInput = treatAsBatchInput
    this._batchSize = inputs.length

    inputs.forEach((input, idx) => {

      if (isTensor3D(input)) {
        this._imageTensors[idx] = input
        this._inputDimensions[idx] = input.shape
        return
      }

      if (isTensor4D(input)) {
        // only single-image 4D tensors are allowed inside an input array
        const batchSize = input.shape[0]
        if (batchSize !== 1) {
          throw new Error(`NetInput - tf.Tensor4D with batchSize ${batchSize} passed, but not supported in input array`)
        }

        this._imageTensors[idx] = input
        // drop the leading batch dimension
        this._inputDimensions[idx] = input.shape.slice(1)
        return
      }

      // any other media input is rendered to a canvas
      // NOTE(review): channel count 3 is assumed here, not derived from the media
      const canvas = input instanceof env.getEnv().Canvas ? input : createCanvasFromMedia(input)
      this._canvases[idx] = canvas
      this._inputDimensions[idx] = [canvas.height, canvas.width, 3]
    })
  }

  public get imageTensors(): Array<tf.Tensor3D | tf.Tensor4D> {
    return this._imageTensors
  }

  public get canvases(): HTMLCanvasElement[] {
    return this._canvases
  }

  public get isBatchInput(): boolean {
    return this.batchSize > 1 || this._treatAsBatchInput
  }

  public get batchSize(): number {
    return this._batchSize
  }

  public get inputDimensions(): number[][] {
    return this._inputDimensions
  }

  public get inputSize(): number | undefined {
    return this._inputSize
  }

  public get reshapedInputDimensions(): Dimensions[] {
    return range(this.batchSize, 0, 1).map(
      (_, batchIdx) => this.getReshapedInputDimensions(batchIdx)
    )
  }

  // returns the canvas if one exists for this index, the tensor otherwise
  public getInput(batchIdx: number): tf.Tensor3D | tf.Tensor4D | HTMLCanvasElement {
    return this.canvases[batchIdx] || this.imageTensors[batchIdx]
  }

  public getInputDimensions(batchIdx: number): number[] {
    return this._inputDimensions[batchIdx]
  }

  public getInputHeight(batchIdx: number): number {
    return this._inputDimensions[batchIdx][0]
  }

  public getInputWidth(batchIdx: number): number {
    return this._inputDimensions[batchIdx][1]
  }

  // dimensions of the input after rescaling to inputSize (aspect ratio preserved);
  // only valid after toBatchTensor() has set inputSize
  public getReshapedInputDimensions(batchIdx: number): Dimensions {
    if (typeof this.inputSize !== 'number') {
      throw new Error('getReshapedInputDimensions - inputSize not set, toBatchTensor has not been called yet')
    }

    const width = this.getInputWidth(batchIdx)
    const height = this.getInputHeight(batchIdx)
    return computeReshapedDimensions({ width, height }, this.inputSize)
  }

  /**
   * Create a batch tensor from all input canvases and tensors
   * with size [batchSize, inputSize, inputSize, 3].
   *
   * @param inputSize Height and width of the tensor.
   * @param isCenterInputs (optional, default: true) If true, add an equal amount of padding on
   * both sides of the minor dimension of the image.
   * @returns The batch tensor.
   */
  public toBatchTensor(inputSize: number, isCenterInputs: boolean = true): tf.Tensor4D {

    this._inputSize = inputSize

    return tf.tidy(() => {

      const inputTensors = range(this.batchSize, 0, 1).map(batchIdx => {
        const input = this.getInput(batchIdx)

        if (input instanceof tf.Tensor) {
          // @ts-ignore: error TS2344: Type 'Rank.R4' does not satisfy the constraint 'Tensor<Rank>'.
          let imgTensor = isTensor4D(input) ? input : input.expandDims<tf.Rank.R4>()
          // pad to square before resizing, so the aspect ratio is preserved
          // @ts-ignore: error TS2344: Type 'Rank.R4' does not satisfy the constraint 'Tensor<Rank>'.
          imgTensor = padToSquare(imgTensor, isCenterInputs)

          if (imgTensor.shape[1] !== inputSize || imgTensor.shape[2] !== inputSize) {
            imgTensor = tf.image.resizeBilinear(imgTensor, [inputSize, inputSize])
          }

          return imgTensor.as3D(inputSize, inputSize, 3)
        }

        if (input instanceof env.getEnv().Canvas) {
          // canvases are squared + resized on the 2d context, then read as pixels
          return tf.browser.fromPixels(imageToSquare(input, inputSize, isCenterInputs))
        }

        throw new Error(`toBatchTensor - at batchIdx ${batchIdx}, expected input to be instanceof tf.Tensor or instanceof HTMLCanvasElement, instead have ${input}`)
      })

      const batchTensor = tf.stack(inputTensors.map(t => t.toFloat())).as4D(this.batchSize, inputSize, inputSize, 3)

      return batchTensor
    })
  }
}
|
|
@ -0,0 +1,28 @@
|
||||||
|
import { env } from '../env';
|
||||||
|
import { isMediaLoaded } from './isMediaLoaded';
|
||||||
|
|
||||||
|
export function awaitMediaLoaded(media: HTMLImageElement | HTMLVideoElement | HTMLCanvasElement) {
|
||||||
|
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
if (media instanceof env.getEnv().Canvas || isMediaLoaded(media)) {
|
||||||
|
return resolve()
|
||||||
|
}
|
||||||
|
|
||||||
|
function onLoad(e: Event) {
|
||||||
|
if (!e.currentTarget) return
|
||||||
|
e.currentTarget.removeEventListener('load', onLoad)
|
||||||
|
e.currentTarget.removeEventListener('error', onError)
|
||||||
|
resolve(e)
|
||||||
|
}
|
||||||
|
|
||||||
|
function onError(e: Event) {
|
||||||
|
if (!e.currentTarget) return
|
||||||
|
e.currentTarget.removeEventListener('load', onLoad)
|
||||||
|
e.currentTarget.removeEventListener('error', onError)
|
||||||
|
reject(e)
|
||||||
|
}
|
||||||
|
|
||||||
|
media.addEventListener('load', onLoad)
|
||||||
|
media.addEventListener('error', onError)
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,23 @@
|
||||||
|
import { env } from '../env';
|
||||||
|
|
||||||
|
export function bufferToImage(buf: Blob): Promise<HTMLImageElement> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
if (!(buf instanceof Blob)) {
|
||||||
|
return reject('bufferToImage - expected buf to be of type: Blob')
|
||||||
|
}
|
||||||
|
|
||||||
|
const reader = new FileReader()
|
||||||
|
reader.onload = () => {
|
||||||
|
if (typeof reader.result !== 'string') {
|
||||||
|
return reject('bufferToImage - expected reader.result to be a string, in onload')
|
||||||
|
}
|
||||||
|
|
||||||
|
const img = env.getEnv().createImageElement()
|
||||||
|
img.onload = () => resolve(img)
|
||||||
|
img.onerror = reject
|
||||||
|
img.src = reader.result
|
||||||
|
}
|
||||||
|
reader.onerror = reject
|
||||||
|
reader.readAsDataURL(buf)
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,33 @@
|
||||||
|
import { IDimensions } from '../classes/Dimensions';
|
||||||
|
import { env } from '../env';
|
||||||
|
import { getContext2dOrThrow } from './getContext2dOrThrow';
|
||||||
|
import { getMediaDimensions } from './getMediaDimensions';
|
||||||
|
import { isMediaLoaded } from './isMediaLoaded';
|
||||||
|
|
||||||
|
export function createCanvas({ width, height }: IDimensions): HTMLCanvasElement {
|
||||||
|
|
||||||
|
const { createCanvasElement } = env.getEnv()
|
||||||
|
const canvas = createCanvasElement()
|
||||||
|
canvas.width = width
|
||||||
|
canvas.height = height
|
||||||
|
return canvas
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createCanvasFromMedia(media: HTMLImageElement | HTMLVideoElement | ImageData, dims?: IDimensions): HTMLCanvasElement {
|
||||||
|
|
||||||
|
const { ImageData } = env.getEnv()
|
||||||
|
|
||||||
|
if (!(media instanceof ImageData) && !isMediaLoaded(media)) {
|
||||||
|
throw new Error('createCanvasFromMedia - media has not finished loading yet')
|
||||||
|
}
|
||||||
|
|
||||||
|
const { width, height } = dims || getMediaDimensions(media)
|
||||||
|
const canvas = createCanvas({ width, height })
|
||||||
|
|
||||||
|
if (media instanceof ImageData) {
|
||||||
|
getContext2dOrThrow(canvas).putImageData(media, 0, 0)
|
||||||
|
} else {
|
||||||
|
getContext2dOrThrow(canvas).drawImage(media, 0, 0, width, height)
|
||||||
|
}
|
||||||
|
return canvas
|
||||||
|
}
|
|
@ -0,0 +1,46 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { Rect } from '../classes';
|
||||||
|
import { FaceDetection } from '../classes/FaceDetection';
|
||||||
|
import { isTensor3D, isTensor4D } from '../utils';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extracts the tensors of the image regions containing the detected faces.
|
||||||
|
* Useful if you want to compute the face descriptors for the face images.
|
||||||
|
* Using this method is faster then extracting a canvas for each face and
|
||||||
|
* converting them to tensors individually.
|
||||||
|
*
|
||||||
|
* @param imageTensor The image tensor that face detection has been performed on.
|
||||||
|
* @param detections The face detection results or face bounding boxes for that image.
|
||||||
|
* @returns Tensors of the corresponding image region for each detected face.
|
||||||
|
*/
|
||||||
|
export async function extractFaceTensors(
|
||||||
|
imageTensor: tf.Tensor3D | tf.Tensor4D,
|
||||||
|
detections: Array<FaceDetection | Rect>
|
||||||
|
): Promise<tf.Tensor3D[]> {
|
||||||
|
|
||||||
|
if (!isTensor3D(imageTensor) && !isTensor4D(imageTensor)) {
|
||||||
|
throw new Error('extractFaceTensors - expected image tensor to be 3D or 4D')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isTensor4D(imageTensor) && imageTensor.shape[0] > 1) {
|
||||||
|
throw new Error('extractFaceTensors - batchSize > 1 not supported')
|
||||||
|
}
|
||||||
|
|
||||||
|
return tf.tidy(() => {
|
||||||
|
const [imgHeight, imgWidth, numChannels] = imageTensor.shape.slice(isTensor4D(imageTensor) ? 1 : 0)
|
||||||
|
|
||||||
|
const boxes = detections.map(
|
||||||
|
det => det instanceof FaceDetection
|
||||||
|
? det.forSize(imgWidth, imgHeight).box
|
||||||
|
: det
|
||||||
|
)
|
||||||
|
.map(box => box.clipAtImageBorders(imgWidth, imgHeight))
|
||||||
|
|
||||||
|
const faceTensors = boxes.map(({ x, y, width, height }) =>
|
||||||
|
tf.slice3d(imageTensor.as3D(imgHeight, imgWidth, numChannels), [y, x, 0], [height, width, numChannels])
|
||||||
|
)
|
||||||
|
|
||||||
|
return faceTensors
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,53 @@
|
||||||
|
import { FaceDetection } from '../classes/FaceDetection';
|
||||||
|
import { Rect } from '../classes/Rect';
|
||||||
|
import { env } from '../env';
|
||||||
|
import { createCanvas } from './createCanvas';
|
||||||
|
import { getContext2dOrThrow } from './getContext2dOrThrow';
|
||||||
|
import { imageTensorToCanvas } from './imageTensorToCanvas';
|
||||||
|
import { toNetInput } from './toNetInput';
|
||||||
|
import { TNetInput } from './types';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extracts the image regions containing the detected faces.
|
||||||
|
*
|
||||||
|
* @param input The image that face detection has been performed on.
|
||||||
|
* @param detections The face detection results or face bounding boxes for that image.
|
||||||
|
* @returns The Canvases of the corresponding image region for each detected face.
|
||||||
|
*/
|
||||||
|
export async function extractFaces(
|
||||||
|
input: TNetInput,
|
||||||
|
detections: Array<FaceDetection | Rect>
|
||||||
|
): Promise<HTMLCanvasElement[]> {
|
||||||
|
|
||||||
|
const { Canvas } = env.getEnv()
|
||||||
|
|
||||||
|
let canvas = input as HTMLCanvasElement
|
||||||
|
|
||||||
|
if (!(input instanceof Canvas)) {
|
||||||
|
const netInput = await toNetInput(input)
|
||||||
|
|
||||||
|
if (netInput.batchSize > 1) {
|
||||||
|
throw new Error('extractFaces - batchSize > 1 not supported')
|
||||||
|
}
|
||||||
|
|
||||||
|
const tensorOrCanvas = netInput.getInput(0)
|
||||||
|
canvas = tensorOrCanvas instanceof Canvas
|
||||||
|
? tensorOrCanvas
|
||||||
|
: await imageTensorToCanvas(tensorOrCanvas)
|
||||||
|
}
|
||||||
|
|
||||||
|
const ctx = getContext2dOrThrow(canvas)
|
||||||
|
const boxes = detections.map(
|
||||||
|
det => det instanceof FaceDetection
|
||||||
|
? det.forSize(canvas.width, canvas.height).box.floor()
|
||||||
|
: det
|
||||||
|
)
|
||||||
|
.map(box => box.clipAtImageBorders(canvas.width, canvas.height))
|
||||||
|
|
||||||
|
return boxes.map(({ x, y, width, height }) => {
|
||||||
|
const faceImg = createCanvas({ width, height })
|
||||||
|
getContext2dOrThrow(faceImg)
|
||||||
|
.putImageData(ctx.getImageData(x, y, width, height), 0, 0)
|
||||||
|
return faceImg
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,12 @@
|
||||||
|
import { bufferToImage } from './bufferToImage';
|
||||||
|
import { fetchOrThrow } from './fetchOrThrow';
|
||||||
|
|
||||||
|
export async function fetchImage(uri: string): Promise<HTMLImageElement> {
|
||||||
|
const res = await fetchOrThrow(uri)
|
||||||
|
const blob = await (res).blob()
|
||||||
|
|
||||||
|
if (!blob.type.startsWith('image/')) {
|
||||||
|
throw new Error(`fetchImage - expected blob type to be of type image/*, instead have: ${blob.type}, for url: ${res.url}`)
|
||||||
|
}
|
||||||
|
return bufferToImage(blob)
|
||||||
|
}
|
|
@ -0,0 +1,5 @@
|
||||||
|
import { fetchOrThrow } from './fetchOrThrow';
|
||||||
|
|
||||||
|
export async function fetchJson<T>(uri: string): Promise<T> {
|
||||||
|
return (await fetchOrThrow(uri)).json()
|
||||||
|
}
|
|
@ -0,0 +1,5 @@
|
||||||
|
import { fetchOrThrow } from './fetchOrThrow';
|
||||||
|
|
||||||
|
export async function fetchNetWeights(uri: string): Promise<Float32Array> {
|
||||||
|
return new Float32Array(await (await fetchOrThrow(uri)).arrayBuffer())
|
||||||
|
}
|
|
@ -0,0 +1,14 @@
|
||||||
|
import { env } from '../env';
|
||||||
|
|
||||||
|
export async function fetchOrThrow(
|
||||||
|
url: string,
|
||||||
|
init?: RequestInit
|
||||||
|
): Promise<Response> {
|
||||||
|
|
||||||
|
const fetch = env.getEnv().fetch
|
||||||
|
const res = await fetch(url, init)
|
||||||
|
if (!(res.status < 400)) {
|
||||||
|
throw new Error(`failed to fetch: (${res.status}) ${res.statusText}, from url: ${res.url}`)
|
||||||
|
}
|
||||||
|
return res
|
||||||
|
}
|
|
@ -0,0 +1,24 @@
|
||||||
|
import { env } from '../env';
|
||||||
|
import { resolveInput } from './resolveInput';
|
||||||
|
|
||||||
|
export function getContext2dOrThrow(canvasArg: string | HTMLCanvasElement | CanvasRenderingContext2D): CanvasRenderingContext2D {
|
||||||
|
|
||||||
|
const { Canvas, CanvasRenderingContext2D } = env.getEnv()
|
||||||
|
|
||||||
|
if (canvasArg instanceof CanvasRenderingContext2D) {
|
||||||
|
return canvasArg
|
||||||
|
}
|
||||||
|
|
||||||
|
const canvas = resolveInput(canvasArg)
|
||||||
|
|
||||||
|
if (!(canvas instanceof Canvas)) {
|
||||||
|
throw new Error('resolveContext2d - expected canvas to be of instance of Canvas')
|
||||||
|
}
|
||||||
|
|
||||||
|
const ctx = canvas.getContext('2d')
|
||||||
|
if (!ctx) {
|
||||||
|
throw new Error('resolveContext2d - canvas 2d context is null')
|
||||||
|
}
|
||||||
|
|
||||||
|
return ctx
|
||||||
|
}
|
|
@ -0,0 +1,15 @@
|
||||||
|
import { Dimensions, IDimensions } from '../classes/Dimensions';
|
||||||
|
import { env } from '../env';
|
||||||
|
|
||||||
|
export function getMediaDimensions(input: HTMLImageElement | HTMLCanvasElement | HTMLVideoElement | IDimensions): Dimensions {
|
||||||
|
|
||||||
|
const { Image, Video } = env.getEnv()
|
||||||
|
|
||||||
|
if (input instanceof Image) {
|
||||||
|
return new Dimensions(input.naturalWidth, input.naturalHeight)
|
||||||
|
}
|
||||||
|
if (input instanceof Video) {
|
||||||
|
return new Dimensions(input.videoWidth, input.videoHeight)
|
||||||
|
}
|
||||||
|
return new Dimensions(input.width, input.height)
|
||||||
|
}
|
|
@ -0,0 +1,20 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { env } from '../env';
|
||||||
|
import { isTensor4D } from '../utils';
|
||||||
|
|
||||||
|
export async function imageTensorToCanvas(
|
||||||
|
imgTensor: tf.Tensor,
|
||||||
|
canvas?: HTMLCanvasElement
|
||||||
|
): Promise<HTMLCanvasElement> {
|
||||||
|
|
||||||
|
const targetCanvas = canvas || env.getEnv().createCanvasElement()
|
||||||
|
|
||||||
|
const [height, width, numChannels] = imgTensor.shape.slice(isTensor4D(imgTensor) ? 1 : 0)
|
||||||
|
const imgTensor3D = tf.tidy(() => imgTensor.as3D(height, width, numChannels).toInt())
|
||||||
|
await tf.browser.toPixels(imgTensor3D, targetCanvas)
|
||||||
|
|
||||||
|
imgTensor3D.dispose()
|
||||||
|
|
||||||
|
return targetCanvas
|
||||||
|
}
|
|
@ -0,0 +1,28 @@
|
||||||
|
import { env } from '../env';
|
||||||
|
import { createCanvas, createCanvasFromMedia } from './createCanvas';
|
||||||
|
import { getContext2dOrThrow } from './getContext2dOrThrow';
|
||||||
|
import { getMediaDimensions } from './getMediaDimensions';
|
||||||
|
|
||||||
|
export function imageToSquare(input: HTMLImageElement | HTMLCanvasElement, inputSize: number, centerImage: boolean = false) {
|
||||||
|
|
||||||
|
const { Image, Canvas } = env.getEnv()
|
||||||
|
|
||||||
|
if (!(input instanceof Image || input instanceof Canvas)) {
|
||||||
|
throw new Error('imageToSquare - expected arg0 to be HTMLImageElement | HTMLCanvasElement')
|
||||||
|
}
|
||||||
|
|
||||||
|
const dims = getMediaDimensions(input)
|
||||||
|
const scale = inputSize / Math.max(dims.height, dims.width)
|
||||||
|
const width = scale * dims.width
|
||||||
|
const height = scale * dims.height
|
||||||
|
|
||||||
|
const targetCanvas = createCanvas({ width: inputSize, height: inputSize })
|
||||||
|
const inputCanvas = input instanceof Canvas ? input : createCanvasFromMedia(input)
|
||||||
|
|
||||||
|
const offset = Math.abs(width - height) / 2
|
||||||
|
const dx = centerImage && width < height ? offset : 0
|
||||||
|
const dy = centerImage && height < width ? offset : 0
|
||||||
|
getContext2dOrThrow(targetCanvas).drawImage(inputCanvas, dx, dy, width, height)
|
||||||
|
|
||||||
|
return targetCanvas
|
||||||
|
}
|
|
@ -0,0 +1,21 @@
|
||||||
|
// Barrel module: re-exports all DOM/media helpers of this package.
// Note: kept in this order; reordering could change module evaluation order.
export * from './awaitMediaLoaded'
export * from './bufferToImage'
export * from './createCanvas'
export * from './extractFaces'
export * from './extractFaceTensors'
export * from './fetchImage'
export * from './fetchJson'
export * from './fetchNetWeights'
export * from './fetchOrThrow'
export * from './getContext2dOrThrow'
export * from './getMediaDimensions'
export * from './imageTensorToCanvas'
export * from './imageToSquare'
export * from './isMediaElement'
export * from './isMediaLoaded'
export * from './loadWeightMap'
export * from './matchDimensions'
export * from './NetInput'
export * from './resolveInput'
export * from './toNetInput'
export * from './types'
|
|
@ -0,0 +1,10 @@
|
||||||
|
import { env } from '../env';
|
||||||
|
|
||||||
|
export function isMediaElement(input: any) {
|
||||||
|
|
||||||
|
const { Image, Canvas, Video } = env.getEnv()
|
||||||
|
|
||||||
|
return input instanceof Image
|
||||||
|
|| input instanceof Canvas
|
||||||
|
|| input instanceof Video
|
||||||
|
}
|
|
@ -0,0 +1,9 @@
|
||||||
|
import { env } from '../env';
|
||||||
|
|
||||||
|
export function isMediaLoaded(media: HTMLImageElement | HTMLVideoElement) : boolean {
|
||||||
|
|
||||||
|
const { Image, Video } = env.getEnv()
|
||||||
|
|
||||||
|
return (media instanceof Image && media.complete)
|
||||||
|
|| (media instanceof Video && media.readyState >= 3)
|
||||||
|
}
|
|
@ -0,0 +1,15 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { getModelUris } from '../common/getModelUris';
|
||||||
|
import { fetchJson } from './fetchJson';
|
||||||
|
|
||||||
|
export async function loadWeightMap(
|
||||||
|
uri: string | undefined,
|
||||||
|
defaultModelName: string,
|
||||||
|
): Promise<tf.NamedTensorMap> {
|
||||||
|
const { manifestUri, modelBaseUri } = getModelUris(uri, defaultModelName)
|
||||||
|
|
||||||
|
const manifest = await fetchJson<tf.io.WeightsManifestConfig>(manifestUri)
|
||||||
|
|
||||||
|
return tf.io.loadWeights(manifest, modelBaseUri)
|
||||||
|
}
|
|
@ -0,0 +1,11 @@
|
||||||
|
import { IDimensions } from '../classes';
|
||||||
|
import { getMediaDimensions } from './getMediaDimensions';
|
||||||
|
|
||||||
|
export function matchDimensions(input: IDimensions, reference: IDimensions, useMediaDimensions: boolean = false) {
|
||||||
|
const { width, height } = useMediaDimensions
|
||||||
|
? getMediaDimensions(reference)
|
||||||
|
: reference
|
||||||
|
input.width = width
|
||||||
|
input.height = height
|
||||||
|
return { width, height }
|
||||||
|
}
|
|
@ -0,0 +1,8 @@
|
||||||
|
import { env } from '../env';
|
||||||
|
|
||||||
|
export function resolveInput(arg: string | any) {
|
||||||
|
if (!env.isNodejs() && typeof arg === 'string') {
|
||||||
|
return document.getElementById(arg)
|
||||||
|
}
|
||||||
|
return arg
|
||||||
|
}
|
|
@ -0,0 +1,57 @@
|
||||||
|
import { isTensor3D, isTensor4D } from '../utils';
|
||||||
|
import { awaitMediaLoaded } from './awaitMediaLoaded';
|
||||||
|
import { isMediaElement } from './isMediaElement';
|
||||||
|
import { NetInput } from './NetInput';
|
||||||
|
import { resolveInput } from './resolveInput';
|
||||||
|
import { TNetInput } from './types';
|
||||||
|
|
||||||
|
/**
 * Validates the input to make sure, they are valid net inputs and awaits all media elements
 * to be finished loading.
 *
 * @param input The input, which can be a media element or an array of different media elements.
 * @returns A NetInput instance, which can be passed into one of the neural networks.
 */
export async function toNetInput(inputs: TNetInput): Promise<NetInput> {
  // already wrapped - pass through
  if (inputs instanceof NetInput) {
    return inputs
  }

  let inputArgArray = Array.isArray(inputs)
    ? inputs
    : [inputs]

  if (!inputArgArray.length) {
    throw new Error('toNetInput - empty array passed as input')
  }

  // include the offending index in error messages for array inputs
  const getIdxHint = (idx: number) => Array.isArray(inputs) ? ` at input index ${idx}:` : ''

  // resolve element-id strings to DOM elements
  const inputArray = inputArgArray.map(resolveInput)

  inputArray.forEach((input, i) => {
    if (!isMediaElement(input) && !isTensor3D(input) && !isTensor4D(input)) {

      // distinguish "bad id string" from "unsupported type"
      if (typeof inputArgArray[i] === 'string') {
        throw new Error(`toNetInput -${getIdxHint(i)} string passed, but could not resolve HTMLElement for element id ${inputArgArray[i]}`)
      }

      throw new Error(`toNetInput -${getIdxHint(i)} expected media to be of type HTMLImageElement | HTMLVideoElement | HTMLCanvasElement | tf.Tensor3D, or to be an element id`)
    }

    if (isTensor4D(input)) {
      // if tf.Tensor4D is passed in the input array, the batch size has to be 1
      const batchSize = input.shape[0]
      if (batchSize !== 1) {
        throw new Error(`toNetInput -${getIdxHint(i)} tf.Tensor4D with batchSize ${batchSize} passed, but not supported in input array`)
      }
    }
  })

  // wait for all media elements being loaded
  await Promise.all(
    inputArray.map(input => isMediaElement(input) && awaitMediaLoaded(input))
  )

  // treatAsBatchInput when the caller passed an array
  return new NetInput(inputArray, Array.isArray(inputs))
}
|
|
@ -0,0 +1,11 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { NetInput } from './NetInput';
|
||||||
|
|
||||||
|
// media elements accepted as network input
export type TMediaElement = HTMLImageElement | HTMLVideoElement | HTMLCanvasElement

// an input that needs no further resolution (media element or image tensor)
export type TResolvedNetInput = TMediaElement | tf.Tensor3D | tf.Tensor4D

// a single input argument; strings are resolved as DOM element ids
export type TNetInputArg = string | TResolvedNetInput

// anything accepted by toNetInput()
export type TNetInput = TNetInputArg | Array<TNetInputArg> | NetInput | tf.Tensor4D
|
|
@ -0,0 +1,59 @@
|
||||||
|
import { Box, IBoundingBox, IRect } from '../classes';
|
||||||
|
import { getContext2dOrThrow } from '../dom/getContext2dOrThrow';
|
||||||
|
import { AnchorPosition, DrawTextField, DrawTextFieldOptions, IDrawTextFieldOptions } from './DrawTextField';
|
||||||
|
|
||||||
|
// options for drawing a bounding box with an optional label
export interface IDrawBoxOptions {
  // stroke color of the box (also used as default label background)
  boxColor?: string
  // stroke width in pixels
  lineWidth?: number
  // options forwarded to the label's text field
  drawLabelOptions?: IDrawTextFieldOptions
  // optional label text drawn at the box
  label?: string
}
|
||||||
|
|
||||||
|
export class DrawBoxOptions {
|
||||||
|
public boxColor: string
|
||||||
|
public lineWidth: number
|
||||||
|
public drawLabelOptions: DrawTextFieldOptions
|
||||||
|
public label?: string
|
||||||
|
|
||||||
|
constructor(options: IDrawBoxOptions = {}) {
|
||||||
|
const { boxColor, lineWidth, label, drawLabelOptions } = options
|
||||||
|
this.boxColor = boxColor || 'rgba(0, 0, 255, 1)'
|
||||||
|
this.lineWidth = lineWidth || 2
|
||||||
|
this.label = label
|
||||||
|
|
||||||
|
const defaultDrawLabelOptions = {
|
||||||
|
anchorPosition: AnchorPosition.BOTTOM_LEFT,
|
||||||
|
backgroundColor: this.boxColor
|
||||||
|
}
|
||||||
|
this.drawLabelOptions = new DrawTextFieldOptions(Object.assign({}, defaultDrawLabelOptions, drawLabelOptions))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class DrawBox {
|
||||||
|
public box: Box
|
||||||
|
public options: DrawBoxOptions
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
box: IBoundingBox | IRect,
|
||||||
|
options: IDrawBoxOptions = {}
|
||||||
|
) {
|
||||||
|
this.box = new Box(box)
|
||||||
|
this.options = new DrawBoxOptions(options)
|
||||||
|
}
|
||||||
|
|
||||||
|
draw(canvasArg: string | HTMLCanvasElement | CanvasRenderingContext2D) {
|
||||||
|
const ctx = getContext2dOrThrow(canvasArg)
|
||||||
|
|
||||||
|
const { boxColor, lineWidth } = this.options
|
||||||
|
|
||||||
|
const { x, y, width, height } = this.box
|
||||||
|
ctx.strokeStyle = boxColor
|
||||||
|
ctx.lineWidth = lineWidth
|
||||||
|
ctx.strokeRect(x, y, width, height)
|
||||||
|
|
||||||
|
const { label } = this.options
|
||||||
|
if (label) {
|
||||||
|
new DrawTextField([label], { x: x - (lineWidth / 2), y }, this.options.drawLabelOptions).draw(canvasArg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,97 @@
|
||||||
|
import { IPoint } from '../classes';
|
||||||
|
import { FaceLandmarks } from '../classes/FaceLandmarks';
|
||||||
|
import { FaceLandmarks68 } from '../classes/FaceLandmarks68';
|
||||||
|
import { getContext2dOrThrow } from '../dom/getContext2dOrThrow';
|
||||||
|
import { WithFaceDetection } from '../factories/WithFaceDetection';
|
||||||
|
import { isWithFaceLandmarks, WithFaceLandmarks } from '../factories/WithFaceLandmarks';
|
||||||
|
import { drawContour } from './drawContour';
|
||||||
|
|
||||||
|
// options for rendering face landmarks (contour lines and/or points)
export interface IDrawFaceLandmarksOptions {
  // draw contour lines between 68-point landmarks (default true)
  drawLines?: boolean
  // draw a dot at each landmark position (default true)
  drawPoints?: boolean
  // contour line width in pixels
  lineWidth?: number
  // landmark point radius in pixels
  pointSize?: number
  // contour line color
  lineColor?: string
  // landmark point color
  pointColor?: string
}
|
||||||
|
|
||||||
|
export class DrawFaceLandmarksOptions {
|
||||||
|
public drawLines: boolean
|
||||||
|
public drawPoints: boolean
|
||||||
|
public lineWidth: number
|
||||||
|
public pointSize: number
|
||||||
|
public lineColor: string
|
||||||
|
public pointColor: string
|
||||||
|
|
||||||
|
constructor(options: IDrawFaceLandmarksOptions = {}) {
|
||||||
|
const { drawLines = true, drawPoints = true, lineWidth, lineColor, pointSize, pointColor } = options
|
||||||
|
this.drawLines = drawLines
|
||||||
|
this.drawPoints = drawPoints
|
||||||
|
this.lineWidth = lineWidth || 1
|
||||||
|
this.pointSize = pointSize || 2
|
||||||
|
this.lineColor = lineColor || 'rgba(0, 255, 255, 1)'
|
||||||
|
this.pointColor = pointColor || 'rgba(255, 0, 255, 1)'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class DrawFaceLandmarks {
|
||||||
|
public faceLandmarks: FaceLandmarks
|
||||||
|
public options: DrawFaceLandmarksOptions
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
faceLandmarks: FaceLandmarks,
|
||||||
|
options: IDrawFaceLandmarksOptions = {}
|
||||||
|
) {
|
||||||
|
this.faceLandmarks = faceLandmarks
|
||||||
|
this.options = new DrawFaceLandmarksOptions(options)
|
||||||
|
}
|
||||||
|
|
||||||
|
draw(canvasArg: string | HTMLCanvasElement | CanvasRenderingContext2D) {
|
||||||
|
const ctx = getContext2dOrThrow(canvasArg)
|
||||||
|
|
||||||
|
const { drawLines, drawPoints, lineWidth, lineColor, pointSize, pointColor } = this.options
|
||||||
|
|
||||||
|
if (drawLines && this.faceLandmarks instanceof FaceLandmarks68) {
|
||||||
|
ctx.strokeStyle = lineColor
|
||||||
|
ctx.lineWidth = lineWidth
|
||||||
|
drawContour(ctx, this.faceLandmarks.getJawOutline())
|
||||||
|
drawContour(ctx, this.faceLandmarks.getLeftEyeBrow())
|
||||||
|
drawContour(ctx, this.faceLandmarks.getRightEyeBrow())
|
||||||
|
drawContour(ctx, this.faceLandmarks.getNose())
|
||||||
|
drawContour(ctx, this.faceLandmarks.getLeftEye(), true)
|
||||||
|
drawContour(ctx, this.faceLandmarks.getRightEye(), true)
|
||||||
|
drawContour(ctx, this.faceLandmarks.getMouth(), true)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (drawPoints) {
|
||||||
|
ctx.strokeStyle = pointColor
|
||||||
|
ctx.fillStyle = pointColor
|
||||||
|
|
||||||
|
const drawPoint = (pt: IPoint) => {
|
||||||
|
ctx.beginPath()
|
||||||
|
ctx.arc(pt.x, pt.y, pointSize, 0, 2 * Math.PI)
|
||||||
|
ctx.fill()
|
||||||
|
}
|
||||||
|
this.faceLandmarks.positions.forEach(drawPoint)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export type DrawFaceLandmarksInput = FaceLandmarks | WithFaceLandmarks<WithFaceDetection<{}>>
|
||||||
|
|
||||||
|
export function drawFaceLandmarks(
|
||||||
|
canvasArg: string | HTMLCanvasElement,
|
||||||
|
faceLandmarks: DrawFaceLandmarksInput | Array<DrawFaceLandmarksInput>
|
||||||
|
) {
|
||||||
|
const faceLandmarksArray = Array.isArray(faceLandmarks) ? faceLandmarks : [faceLandmarks]
|
||||||
|
faceLandmarksArray.forEach(f => {
|
||||||
|
const landmarks = f instanceof FaceLandmarks
|
||||||
|
? f
|
||||||
|
: (isWithFaceLandmarks(f) ? f.landmarks : undefined)
|
||||||
|
if (!landmarks) {
|
||||||
|
throw new Error('drawFaceLandmarks - expected faceExpressions to be FaceLandmarks | WithFaceLandmarks<WithFaceDetection<{}>> or array thereof')
|
||||||
|
}
|
||||||
|
|
||||||
|
new DrawFaceLandmarks(landmarks).draw(canvasArg)
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,108 @@
|
||||||
|
import { IDimensions, IPoint } from '../classes';
|
||||||
|
import { getContext2dOrThrow } from '../dom/getContext2dOrThrow';
|
||||||
|
import { resolveInput } from '../dom/resolveInput';
|
||||||
|
|
||||||
|
// which corner of the text field the anchor point refers to
export enum AnchorPosition {
  TOP_LEFT = 'TOP_LEFT',
  TOP_RIGHT = 'TOP_RIGHT',
  BOTTOM_LEFT = 'BOTTOM_LEFT',
  BOTTOM_RIGHT = 'BOTTOM_RIGHT'
}
|
||||||
|
|
||||||
|
// options for rendering a text field onto a canvas
export interface IDrawTextFieldOptions {
  // corner of the field placed at the anchor point (default TOP_LEFT)
  anchorPosition?: AnchorPosition
  // background fill behind the text
  backgroundColor?: string
  // text color
  fontColor?: string
  // font size in pixels (also used as line height)
  fontSize?: number
  // font family name
  fontStyle?: string
  // inner padding in pixels
  padding?: number
}
|
||||||
|
|
||||||
|
export class DrawTextFieldOptions implements IDrawTextFieldOptions {
|
||||||
|
public anchorPosition: AnchorPosition
|
||||||
|
public backgroundColor: string
|
||||||
|
public fontColor: string
|
||||||
|
public fontSize: number
|
||||||
|
public fontStyle: string
|
||||||
|
public padding: number
|
||||||
|
|
||||||
|
constructor(options: IDrawTextFieldOptions = {}) {
|
||||||
|
const { anchorPosition, backgroundColor, fontColor, fontSize, fontStyle, padding } = options
|
||||||
|
this.anchorPosition = anchorPosition || AnchorPosition.TOP_LEFT
|
||||||
|
this.backgroundColor = backgroundColor || 'rgba(0, 0, 0, 0.5)'
|
||||||
|
this.fontColor = fontColor || 'rgba(255, 255, 255, 1)'
|
||||||
|
this.fontSize = fontSize || 14
|
||||||
|
this.fontStyle = fontStyle || 'Georgia'
|
||||||
|
this.padding = padding || 4
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Draws a (multi line) text box onto a canvas, positioned relative to an
 * anchor point according to options.anchorPosition.
 */
export class DrawTextField {
  // lines of text, one array entry per rendered line
  public text: string[]
  // the anchor point the box is positioned against
  public anchor : IPoint
  public options: DrawTextFieldOptions

  constructor(
    text: string | string[] | DrawTextField,
    anchor: IPoint,
    options: IDrawTextFieldOptions = {}
  ) {
    // normalize the input to an array of lines (copy-construct from an
    // existing DrawTextField reuses its text array)
    this.text = typeof text === 'string'
      ? [text]
      : (text instanceof DrawTextField ? text.text : text)
    this.anchor = anchor
    this.options = new DrawTextFieldOptions(options)
  }

  // width of the widest line (for the font currently set on ctx) plus
  // horizontal padding on both sides
  measureWidth(ctx: CanvasRenderingContext2D): number {
    const { padding } = this.options
    return this.text.map(l => ctx.measureText(l).width).reduce((w0, w1) => w0 < w1 ? w1 : w0, 0) + (2 * padding)
  }

  // fontSize doubles as the line height; add vertical padding on both sides
  measureHeight(): number {
    const { fontSize, padding } = this.options
    return this.text.length * fontSize + (2 * padding)
  }

  /**
   * Computes the top-left corner of the box. The anchorPosition option
   * decides which corner of the box the anchor point refers to; when canvas
   * dimensions are given, the box is clamped into the canvas bounds.
   */
  getUpperLeft(ctx: CanvasRenderingContext2D, canvasDims?: IDimensions): IPoint {
    const { anchorPosition } = this.options
    const isShiftLeft = anchorPosition === AnchorPosition.BOTTOM_RIGHT || anchorPosition === AnchorPosition.TOP_RIGHT
    const isShiftTop = anchorPosition === AnchorPosition.BOTTOM_LEFT || anchorPosition === AnchorPosition.BOTTOM_RIGHT

    const textFieldWidth = this.measureWidth(ctx)
    const textFieldHeight = this.measureHeight()
    const x = (isShiftLeft ? this.anchor.x - textFieldWidth : this.anchor.x)
    const y = isShiftTop ? this.anchor.y - textFieldHeight : this.anchor.y

    // adjust anchor if text box exceeds canvas borders
    if (canvasDims) {
      const { width, height } = canvasDims
      const newX = Math.max(Math.min(x, width - textFieldWidth), 0)
      const newY = Math.max(Math.min(y, height - textFieldHeight), 0)
      return { x: newX, y: newY }
    }
    return { x, y }
  }

  /**
   * Renders the box: one background fillRect, then one fillText per line.
   */
  draw(canvasArg: string | HTMLCanvasElement | CanvasRenderingContext2D) {
    const canvas = resolveInput(canvasArg)
    const ctx = getContext2dOrThrow(canvas)

    const { backgroundColor, fontColor, fontSize, fontStyle, padding } = this.options

    // the font must be set before measuring, since measureText depends on it
    ctx.font = `${fontSize}px ${fontStyle}`
    const maxTextWidth = this.measureWidth(ctx)
    const textHeight = this.measureHeight()

    ctx.fillStyle = backgroundColor
    const upperLeft = this.getUpperLeft(ctx, canvas)
    ctx.fillRect(upperLeft.x, upperLeft.y, maxTextWidth, textHeight)

    ctx.fillStyle = fontColor;
    this.text.forEach((textLine, i) => {
      // each line is drawn one fontSize below the previous one
      const x = padding + upperLeft.x
      const y = padding + upperLeft.y + ((i + 1) * fontSize)
      ctx.fillText(textLine, x, y)
    })
  }
}
|
|
@ -0,0 +1,28 @@
|
||||||
|
import { Point } from '../classes';
|
||||||
|
|
||||||
|
export function drawContour(
|
||||||
|
ctx: CanvasRenderingContext2D,
|
||||||
|
points: Point[],
|
||||||
|
isClosed: boolean = false
|
||||||
|
) {
|
||||||
|
ctx.beginPath()
|
||||||
|
|
||||||
|
points.slice(1).forEach(({ x, y }, prevIdx) => {
|
||||||
|
const from = points[prevIdx]
|
||||||
|
ctx.moveTo(from.x, from.y)
|
||||||
|
ctx.lineTo(x, y)
|
||||||
|
})
|
||||||
|
|
||||||
|
if (isClosed) {
|
||||||
|
const from = points[points.length - 1]
|
||||||
|
const to = points[0]
|
||||||
|
if (!from || !to) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx.moveTo(from.x, from.y)
|
||||||
|
ctx.lineTo(to.x, to.y)
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx.stroke()
|
||||||
|
}
|
|
@ -0,0 +1,27 @@
|
||||||
|
import { Box, IBoundingBox, IRect } from '../classes';
|
||||||
|
import { FaceDetection } from '../classes/FaceDetection';
|
||||||
|
import { isWithFaceDetection, WithFaceDetection } from '../factories/WithFaceDetection';
|
||||||
|
import { round } from '../utils';
|
||||||
|
import { DrawBox } from './DrawBox';
|
||||||
|
|
||||||
|
export type TDrawDetectionsInput = IRect | IBoundingBox | FaceDetection | WithFaceDetection<{}>
|
||||||
|
|
||||||
|
export function drawDetections(
|
||||||
|
canvasArg: string | HTMLCanvasElement,
|
||||||
|
detections: TDrawDetectionsInput | Array<TDrawDetectionsInput>
|
||||||
|
) {
|
||||||
|
const detectionsArray = Array.isArray(detections) ? detections : [detections]
|
||||||
|
|
||||||
|
detectionsArray.forEach(det => {
|
||||||
|
const score = det instanceof FaceDetection
|
||||||
|
? det.score
|
||||||
|
: (isWithFaceDetection(det) ? det.detection.score : undefined)
|
||||||
|
|
||||||
|
const box = det instanceof FaceDetection
|
||||||
|
? det.box
|
||||||
|
: (isWithFaceDetection(det) ? det.detection.box : new Box(det))
|
||||||
|
|
||||||
|
const label = score ? `${round(score)}` : undefined
|
||||||
|
new DrawBox(box, { label }).draw(canvasArg)
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,39 @@
|
||||||
|
import { IPoint, Point } from '../classes';
|
||||||
|
import { FaceExpressions } from '../faceExpressionNet';
|
||||||
|
import { isWithFaceDetection } from '../factories/WithFaceDetection';
|
||||||
|
import { isWithFaceExpressions, WithFaceExpressions } from '../factories/WithFaceExpressions';
|
||||||
|
import { round } from '../utils';
|
||||||
|
import { DrawTextField } from './DrawTextField';
|
||||||
|
|
||||||
|
export type DrawFaceExpressionsInput = FaceExpressions | WithFaceExpressions<{}>
|
||||||
|
|
||||||
|
export function drawFaceExpressions(
|
||||||
|
canvasArg: string | HTMLCanvasElement,
|
||||||
|
faceExpressions: DrawFaceExpressionsInput | Array<DrawFaceExpressionsInput>,
|
||||||
|
minConfidence = 0.1,
|
||||||
|
textFieldAnchor?: IPoint
|
||||||
|
) {
|
||||||
|
const faceExpressionsArray = Array.isArray(faceExpressions) ? faceExpressions : [faceExpressions]
|
||||||
|
|
||||||
|
faceExpressionsArray.forEach(e => {
|
||||||
|
const expr = e instanceof FaceExpressions
|
||||||
|
? e
|
||||||
|
: (isWithFaceExpressions(e) ? e.expressions : undefined)
|
||||||
|
if (!expr) {
|
||||||
|
throw new Error('drawFaceExpressions - expected faceExpressions to be FaceExpressions | WithFaceExpressions<{}> or array thereof')
|
||||||
|
}
|
||||||
|
|
||||||
|
const sorted = expr.asSortedArray()
|
||||||
|
const resultsToDisplay = sorted.filter(expr => expr.probability > minConfidence)
|
||||||
|
|
||||||
|
const anchor = isWithFaceDetection(e)
|
||||||
|
? e.detection.box.bottomLeft
|
||||||
|
: (textFieldAnchor || new Point(0, 0))
|
||||||
|
|
||||||
|
const drawTextField = new DrawTextField(
|
||||||
|
resultsToDisplay.map(expr => `${expr.expression} (${round(expr.probability)})`),
|
||||||
|
anchor
|
||||||
|
)
|
||||||
|
drawTextField.draw(canvasArg)
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,6 @@
|
||||||
|
export * from './drawContour'
|
||||||
|
export * from './drawDetections'
|
||||||
|
export * from './drawFaceExpressions'
|
||||||
|
export * from './DrawBox'
|
||||||
|
export * from './DrawFaceLandmarks'
|
||||||
|
export * from './DrawTextField'
|
|
@ -0,0 +1,24 @@
|
||||||
|
import { Environment } from './types';
|
||||||
|
|
||||||
|
export function createBrowserEnv(): Environment {
|
||||||
|
|
||||||
|
const fetch = window['fetch'] || function() {
|
||||||
|
throw new Error('fetch - missing fetch implementation for browser environment')
|
||||||
|
}
|
||||||
|
|
||||||
|
const readFile = function() {
|
||||||
|
throw new Error('readFile - filesystem not available for browser environment')
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
Canvas: HTMLCanvasElement,
|
||||||
|
CanvasRenderingContext2D: CanvasRenderingContext2D,
|
||||||
|
Image: HTMLImageElement,
|
||||||
|
ImageData: ImageData,
|
||||||
|
Video: HTMLVideoElement,
|
||||||
|
createCanvasElement: () => document.createElement('canvas'),
|
||||||
|
createImageElement: () => document.createElement('img'),
|
||||||
|
fetch,
|
||||||
|
readFile
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,30 @@
|
||||||
|
import { FileSystem } from './types';
|
||||||
|
|
||||||
|
export function createFileSystem(fs?: any): FileSystem {
|
||||||
|
|
||||||
|
let requireFsError = ''
|
||||||
|
|
||||||
|
if (!fs) {
|
||||||
|
try {
|
||||||
|
fs = require('fs')
|
||||||
|
} catch (err) {
|
||||||
|
requireFsError = err.toString()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const readFile = fs
|
||||||
|
? function(filePath: string) {
|
||||||
|
return new Promise<Buffer>((res, rej) => {
|
||||||
|
fs.readFile(filePath, function(err: any, buffer: Buffer) {
|
||||||
|
return err ? rej(err) : res(buffer)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
: function() {
|
||||||
|
throw new Error(`readFile - failed to require fs in nodejs environment with error: ${requireFsError}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
readFile
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,40 @@
|
||||||
|
import { createFileSystem } from './createFileSystem';
|
||||||
|
import { Environment } from './types';
|
||||||
|
|
||||||
|
/**
 * Builds the ambient environment for nodejs. Canvas/Image implementations
 * are picked up from globals when present (e.g. registered by a node-canvas
 * monkey patch), otherwise throwing factories / empty stub classes are used.
 */
export function createNodejsEnv(): Environment {

  // prefer an explicitly registered implementation, fall back to DOM-named globals
  const Canvas = global['Canvas'] || global['HTMLCanvasElement']
  const Image = global['Image'] || global['HTMLImageElement']

  // throws unless a Canvas implementation was provided
  const createCanvasElement = function() {
    if (Canvas) {
      return new Canvas()
    }
    throw new Error('createCanvasElement - missing Canvas implementation for nodejs environment')
  }

  // throws unless an Image implementation was provided
  const createImageElement = function() {
    if (Image) {
      return new Image()
    }
    throw new Error('createImageElement - missing Image implementation for nodejs environment')
  }

  // use a global fetch when available (native or polyfilled), else throw on use
  const fetch = global['fetch'] || function() {
    throw new Error('fetch - missing fetch implementation for nodejs environment')
  }

  const fileSystem = createFileSystem()

  return {
    // empty stub classes keep the Environment shape intact when no real
    // implementation exists — presumably so type/instanceof probes elsewhere
    // don't crash (NOTE(review): confirm against consumers)
    Canvas: Canvas || class {},
    CanvasRenderingContext2D: global['CanvasRenderingContext2D'] || class {},
    Image: Image || class {},
    ImageData: global['ImageData'] || class {},
    Video: global['HTMLVideoElement'] || class {},
    createCanvasElement,
    createImageElement,
    fetch,
    ...fileSystem
  }
}
|
|
@ -0,0 +1,67 @@
|
||||||
|
import { createBrowserEnv } from './createBrowserEnv';
|
||||||
|
import { createFileSystem } from './createFileSystem';
|
||||||
|
import { createNodejsEnv } from './createNodejsEnv';
|
||||||
|
import { isBrowser } from './isBrowser';
|
||||||
|
import { isNodejs } from './isNodejs';
|
||||||
|
import { Environment } from './types';
|
||||||
|
|
||||||
|
// module-level singleton holding the active environment abstraction,
// set once by initialize() (or explicitly via setEnv/monkeyPatch)
let environment: Environment | null

/**
 * Returns the active environment.
 * @throws if neither a browser nor a nodejs environment was detected
 */
function getEnv(): Environment {
  if (!environment) {
    throw new Error('getEnv - environment is not defined, check isNodejs() and isBrowser()')
  }
  return environment
}

// replaces the active environment wholesale
function setEnv(env: Environment) {
  environment = env
}

// detects the platform and registers the matching environment
function initialize() {
  // check for isBrowser() first to prevent electron renderer process
  // to be initialized with wrong environment due to isNodejs() returning true
  if (isBrowser()) {
    return setEnv(createBrowserEnv())
  }
  if (isNodejs()) {
    return setEnv(createNodejsEnv())
  }
}

/**
 * Overrides selected parts of the active environment, e.g. to plug
 * node-canvas Canvas/Image implementations into a nodejs environment.
 */
function monkeyPatch(env: Partial<Environment>) {
  if (!environment) {
    initialize()
  }

  if (!environment) {
    throw new Error('monkeyPatch - environment is not defined, check isNodejs() and isBrowser()')
  }

  // default to the currently registered implementations
  const { Canvas = environment.Canvas, Image = environment.Image } = env
  environment.Canvas = Canvas
  environment.Image = Image
  // element factories default to constructing the (possibly patched) classes
  environment.createCanvasElement = env.createCanvasElement || (() => new Canvas())
  environment.createImageElement = env.createImageElement || (() => new Image())

  environment.ImageData = env.ImageData || environment.ImageData
  environment.Video = env.Video || environment.Video
  environment.fetch = env.fetch || environment.fetch
  environment.readFile = env.readFile || environment.readFile
}

// public facade bundling accessors, factories and detection helpers
export const env = {
  getEnv,
  setEnv,
  initialize,
  createBrowserEnv,
  createFileSystem,
  createNodejsEnv,
  monkeyPatch,
  isBrowser,
  isNodejs
}

// detect and register the environment once at module load time
initialize()

export * from './types'
|
|
@ -0,0 +1,9 @@
|
||||||
|
export function isBrowser(): boolean {
|
||||||
|
return typeof window === 'object'
|
||||||
|
&& typeof document !== 'undefined'
|
||||||
|
&& typeof HTMLImageElement !== 'undefined'
|
||||||
|
&& typeof HTMLCanvasElement !== 'undefined'
|
||||||
|
&& typeof HTMLVideoElement !== 'undefined'
|
||||||
|
&& typeof ImageData !== 'undefined'
|
||||||
|
&& typeof CanvasRenderingContext2D !== 'undefined'
|
||||||
|
}
|
|
@ -0,0 +1,8 @@
|
||||||
|
export function isNodejs(): boolean {
|
||||||
|
return typeof global === 'object'
|
||||||
|
&& typeof require === 'function'
|
||||||
|
&& typeof module !== 'undefined'
|
||||||
|
// issues with gatsby.js: module.exports is undefined
|
||||||
|
// && !!module.exports
|
||||||
|
&& typeof process !== 'undefined' && !!process.version
|
||||||
|
}
|
|
@ -0,0 +1,14 @@
|
||||||
|
// minimal filesystem abstraction — only reading is needed (model weights)
export type FileSystem = {
  readFile: (filePath: string) => Promise<Buffer>
}

// platform abstraction over DOM/canvas classes, element factories and I/O,
// so the same code paths run in browsers and nodejs
export type Environment = FileSystem & {
  Canvas: typeof HTMLCanvasElement
  CanvasRenderingContext2D: typeof CanvasRenderingContext2D
  Image: typeof HTMLImageElement
  ImageData: typeof ImageData
  Video: typeof HTMLVideoElement
  createCanvasElement: () => HTMLCanvasElement
  createImageElement: () => HTMLImageElement
  fetch: (url: string, init?: RequestInit) => Promise<Response>
}
|
|
@ -0,0 +1,13 @@
|
||||||
|
export function euclideanDistance(arr1: number[] | Float32Array, arr2: number[] | Float32Array) {
|
||||||
|
if (arr1.length !== arr2.length)
|
||||||
|
throw new Error('euclideanDistance: arr1.length !== arr2.length')
|
||||||
|
|
||||||
|
const desc1 = Array.from(arr1)
|
||||||
|
const desc2 = Array.from(arr2)
|
||||||
|
|
||||||
|
return Math.sqrt(
|
||||||
|
desc1
|
||||||
|
.map((val, i) => val - desc2[i])
|
||||||
|
.reduce((res, diff) => res + Math.pow(diff, 2), 0)
|
||||||
|
)
|
||||||
|
}
|
|
@ -0,0 +1,52 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { NetInput, TNetInput, toNetInput } from '../dom';
|
||||||
|
import { FaceFeatureExtractor } from '../faceFeatureExtractor/FaceFeatureExtractor';
|
||||||
|
import { FaceFeatureExtractorParams } from '../faceFeatureExtractor/types';
|
||||||
|
import { FaceProcessor } from '../faceProcessor/FaceProcessor';
|
||||||
|
import { FaceExpressions } from './FaceExpressions';
|
||||||
|
|
||||||
|
/**
 * Classification head predicting probabilities for the 7 supported face
 * expressions on top of features from a FaceFeatureExtractor.
 */
export class FaceExpressionNet extends FaceProcessor<FaceFeatureExtractorParams> {

  constructor(faceFeatureExtractor: FaceFeatureExtractor = new FaceFeatureExtractor()) {
    super('FaceExpressionNet', faceFeatureExtractor)
  }

  // turns the raw classifier logits into probabilities via softmax
  public forwardInput(input: NetInput | tf.Tensor4D): tf.Tensor2D {
    return tf.tidy(() => tf.softmax(this.runNet(input)))
  }

  public async forward(input: TNetInput): Promise<tf.Tensor2D> {
    return this.forwardInput(await toNetInput(input))
  }

  /**
   * Predicts expression probabilities; returns one FaceExpressions per batch
   * item, or a single FaceExpressions when the input was not a batch.
   */
  public async predictExpressions(input: TNetInput) {
    const netInput = await toNetInput(input)
    const out = await this.forwardInput(netInput)
    // download each batch item's probabilities, disposing the per-item
    // tensors as soon as their data is read
    const probabilitesByBatch = await Promise.all(tf.unstack(out).map(async t => {
      const data = await t.data()
      t.dispose()
      return data
    }))
    out.dispose()

    const predictionsByBatch = probabilitesByBatch
      .map(probabilites => new FaceExpressions(probabilites as Float32Array))

    // unwrap single (non-batch) inputs to a single result
    return netInput.isBatchInput
      ? predictionsByBatch
      : predictionsByBatch[0]
  }

  protected getDefaultModelName(): string {
    return 'face_expression_model'
  }

  // feature depth delivered by the feature extractor
  protected getClassifierChannelsIn(): number {
    return 256
  }

  // one output channel per expression label
  protected getClassifierChannelsOut(): number {
    return 7
  }
}
|
|
@ -0,0 +1,27 @@
|
||||||
|
export const FACE_EXPRESSION_LABELS = ['neutral', 'happy', 'sad', 'angry', 'fearful', 'disgusted', 'surprised']
|
||||||
|
|
||||||
|
export class FaceExpressions {
|
||||||
|
public neutral: number
|
||||||
|
public happy: number
|
||||||
|
public sad: number
|
||||||
|
public angry: number
|
||||||
|
public fearful: number
|
||||||
|
public disgusted: number
|
||||||
|
public surprised: number
|
||||||
|
|
||||||
|
constructor(probabilities: number[] | Float32Array) {
|
||||||
|
if (probabilities.length !== 7) {
|
||||||
|
throw new Error(`FaceExpressions.constructor - expected probabilities.length to be 7, have: ${probabilities.length}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
FACE_EXPRESSION_LABELS.forEach((expression, idx) => {
|
||||||
|
this[expression] = probabilities[idx]
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
asSortedArray() {
|
||||||
|
return FACE_EXPRESSION_LABELS
|
||||||
|
.map(expression => ({ expression, probability: this[expression] as number }))
|
||||||
|
.sort((e0, e1) => e1.probability - e0.probability)
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,2 @@
|
||||||
|
export * from './FaceExpressionNet';
|
||||||
|
export * from './FaceExpressions';
|
|
@ -0,0 +1,55 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { NetInput, TNetInput, toNetInput } from '../dom';
|
||||||
|
import { NeuralNetwork } from '../NeuralNetwork';
|
||||||
|
import { normalize } from '../ops';
|
||||||
|
import { denseBlock4 } from './denseBlock';
|
||||||
|
import { extractParams } from './extractParams';
|
||||||
|
import { extractParamsFromWeigthMap } from './extractParamsFromWeigthMap';
|
||||||
|
import { FaceFeatureExtractorParams, IFaceFeatureExtractor } from './types';
|
||||||
|
|
||||||
|
/**
 * CNN backbone mapping a batch of 112x112 face images to a feature map,
 * consumed by the classifier/regressor heads (e.g. FaceExpressionNet).
 */
export class FaceFeatureExtractor extends NeuralNetwork<FaceFeatureExtractorParams> implements IFaceFeatureExtractor<FaceFeatureExtractorParams> {

  constructor() {
    super('FaceFeatureExtractor')
  }

  /**
   * Runs the backbone on a prepared input.
   * @throws if the model weights have not been loaded yet
   */
  public forwardInput(input: NetInput): tf.Tensor4D {

    const { params } = this

    if (!params) {
      throw new Error('FaceFeatureExtractor - load model before inference')
    }

    return tf.tidy(() => {
      // NOTE(review): second toBatchTensor arg presumably toggles center
      // crop/padding during the 112x112 resize — confirm in NetInput
      const batchTensor = input.toBatchTensor(112, true)
      // subtract per-channel RGB means, then scale into [0, 1]
      const meanRgb = [122.782, 117.001, 104.298]
      const normalized = normalize(batchTensor, meanRgb).div(tf.scalar(255)) as tf.Tensor4D

      // four dense blocks; only the first consumes the normalized image directly
      let out = denseBlock4(normalized, params.dense0, true)
      out = denseBlock4(out, params.dense1)
      out = denseBlock4(out, params.dense2)
      out = denseBlock4(out, params.dense3)
      // spatial pooling of the final feature map
      out = tf.avgPool(out, [7, 7], [2, 2], 'valid')

      return out
    })
  }

  public async forward(input: TNetInput): Promise<tf.Tensor4D> {
    return this.forwardInput(await toNetInput(input))
  }

  protected getDefaultModelName(): string {
    return 'face_feature_extractor_model'
  }

  // "Weigth" typo is part of the inherited API and kept for compatibility
  protected extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap) {
    return extractParamsFromWeigthMap(weightMap)
  }

  protected extractParams(weights: Float32Array) {
    return extractParams(weights)
  }
}
|
|
@ -0,0 +1,54 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { NetInput, TNetInput, toNetInput } from '../dom';
|
||||||
|
import { NeuralNetwork } from '../NeuralNetwork';
|
||||||
|
import { normalize } from '../ops';
|
||||||
|
import { denseBlock3 } from './denseBlock';
|
||||||
|
import { extractParamsFromWeigthMapTiny } from './extractParamsFromWeigthMapTiny';
|
||||||
|
import { extractParamsTiny } from './extractParamsTiny';
|
||||||
|
import { IFaceFeatureExtractor, TinyFaceFeatureExtractorParams } from './types';
|
||||||
|
|
||||||
|
/**
 * Smaller variant of FaceFeatureExtractor: three 3-conv dense blocks instead
 * of four 4-conv blocks, trading accuracy for model size/speed.
 */
export class TinyFaceFeatureExtractor extends NeuralNetwork<TinyFaceFeatureExtractorParams> implements IFaceFeatureExtractor<TinyFaceFeatureExtractorParams> {

  constructor() {
    super('TinyFaceFeatureExtractor')
  }

  /**
   * Runs the backbone on a prepared input.
   * @throws if the model weights have not been loaded yet
   */
  public forwardInput(input: NetInput): tf.Tensor4D {

    const { params } = this

    if (!params) {
      throw new Error('TinyFaceFeatureExtractor - load model before inference')
    }

    return tf.tidy(() => {
      // NOTE(review): second toBatchTensor arg presumably toggles center
      // crop/padding during the 112x112 resize — confirm in NetInput
      const batchTensor = input.toBatchTensor(112, true)
      // subtract per-channel RGB means, then scale into [0, 1]
      const meanRgb = [122.782, 117.001, 104.298]
      const normalized = normalize(batchTensor, meanRgb).div(tf.scalar(255)) as tf.Tensor4D

      // three dense blocks; only the first consumes the normalized image directly
      let out = denseBlock3(normalized, params.dense0, true)
      out = denseBlock3(out, params.dense1)
      out = denseBlock3(out, params.dense2)
      // spatial pooling of the final feature map
      out = tf.avgPool(out, [14, 14], [2, 2], 'valid')

      return out
    })
  }

  public async forward(input: TNetInput): Promise<tf.Tensor4D> {
    return this.forwardInput(await toNetInput(input))
  }

  protected getDefaultModelName(): string {
    return 'face_feature_extractor_tiny_model'
  }

  // "Weigth" typo is part of the inherited API and kept for compatibility
  protected extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap) {
    return extractParamsFromWeigthMapTiny(weightMap)
  }

  protected extractParams(weights: Float32Array) {
    return extractParamsTiny(weights)
  }
}
|
|
@ -0,0 +1,55 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { ConvParams, SeparableConvParams } from '../common';
|
||||||
|
import { depthwiseSeparableConv } from '../common/depthwiseSeparableConv';
|
||||||
|
import { DenseBlock3Params, DenseBlock4Params } from './types';
|
||||||
|
|
||||||
|
/**
 * Dense block of three convolutions whose outputs are accumulated: each
 * stage's input is relu(sum of all previous stage outputs), and the block
 * returns relu(out1 + out2 + out3). The network's first layer uses a plain
 * strided conv for conv0, all other blocks a depthwise separable conv.
 */
export function denseBlock3(
  x: tf.Tensor4D,
  denseBlockParams: DenseBlock3Params,
  isFirstLayer: boolean = false
): tf.Tensor4D {
  return tf.tidy(() => {
    // conv0 downsamples spatially (stride 2) in both variants
    const out1 = tf.relu(
      isFirstLayer
        ? tf.add(
          tf.conv2d(x, (denseBlockParams.conv0 as ConvParams).filters, [2, 2], 'same'),
          denseBlockParams.conv0.bias
        )
        : depthwiseSeparableConv(x, denseBlockParams.conv0 as SeparableConvParams, [2, 2])
    ) as tf.Tensor4D
    const out2 = depthwiseSeparableConv(out1, denseBlockParams.conv1, [1, 1])

    const in3 = tf.relu(tf.add(out1, out2)) as tf.Tensor4D
    const out3 = depthwiseSeparableConv(in3, denseBlockParams.conv2, [1, 1])

    return tf.relu(tf.add(out1, tf.add(out2, out3))) as tf.Tensor4D
  })
}
|
||||||
|
|
||||||
|
/**
 * Dense block of four convolutions with accumulated outputs (see
 * denseBlock3): stage n receives relu(sum of all previous stage outputs).
 * isScaleDown selects stride 2 (spatial downsampling) vs stride 1 for conv0.
 */
export function denseBlock4(
  x: tf.Tensor4D,
  denseBlockParams: DenseBlock4Params,
  isFirstLayer: boolean = false,
  isScaleDown: boolean = true
): tf.Tensor4D {
  return tf.tidy(() => {
    // conv0 is a plain conv for the network's first layer, separable otherwise
    const out1 = tf.relu(
      isFirstLayer
        ? tf.add(
          tf.conv2d(x, (denseBlockParams.conv0 as ConvParams).filters, isScaleDown ? [2, 2] : [1, 1], 'same'),
          denseBlockParams.conv0.bias
        )
        : depthwiseSeparableConv(x, denseBlockParams.conv0 as SeparableConvParams, isScaleDown ? [2, 2] : [1, 1])
    ) as tf.Tensor4D
    const out2 = depthwiseSeparableConv(out1, denseBlockParams.conv1, [1, 1])

    const in3 = tf.relu(tf.add(out1, out2)) as tf.Tensor4D
    const out3 = depthwiseSeparableConv(in3, denseBlockParams.conv2, [1, 1])

    const in4 = tf.relu(tf.add(out1, tf.add(out2, out3))) as tf.Tensor4D
    const out4 = depthwiseSeparableConv(in4, denseBlockParams.conv3, [1, 1])

    return tf.relu(tf.add(out1, tf.add(out2, tf.add(out3, out4)))) as tf.Tensor4D
  })
}
|
|
@ -0,0 +1,32 @@
|
||||||
|
import { extractWeightsFactory, ParamMapping } from '../common';
|
||||||
|
import { extractorsFactory } from './extractorsFactory';
|
||||||
|
import { FaceFeatureExtractorParams } from './types';
|
||||||
|
|
||||||
|
|
||||||
|
/**
 * Deserializes FaceFeatureExtractor parameters from a flat weight buffer,
 * consuming it sequentially (block by block).
 * @throws if any weights remain unconsumed, i.e. the buffer size mismatches
 */
export function extractParams(weights: Float32Array): { params: FaceFeatureExtractorParams, paramMappings: ParamMapping[] } {

  // filled as a side effect by the extractor factories
  const paramMappings: ParamMapping[] = []

  const {
    extractWeights,
    getRemainingWeights
  } = extractWeightsFactory(weights)

  const {
    extractDenseBlock4Params
  } = extractorsFactory(extractWeights, paramMappings)

  // channel counts double per block: 3 -> 32 -> 64 -> 128 -> 256
  const dense0 = extractDenseBlock4Params(3, 32, 'dense0', true)
  const dense1 = extractDenseBlock4Params(32, 64, 'dense1')
  const dense2 = extractDenseBlock4Params(64, 128, 'dense2')
  const dense3 = extractDenseBlock4Params(128, 256, 'dense3')

  // sanity check: the weight buffer must be consumed exactly
  if (getRemainingWeights().length !== 0) {
    throw new Error(`weights remaing after extract: ${getRemainingWeights().length}`)
  }

  return {
    paramMappings,
    params: { dense0, dense1, dense2, dense3 }
  }
}
|
|
@ -0,0 +1,27 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { disposeUnusedWeightTensors, ParamMapping } from '../common';
|
||||||
|
import { loadParamsFactory } from './loadParamsFactory';
|
||||||
|
import { FaceFeatureExtractorParams } from './types';
|
||||||
|
|
||||||
|
/**
 * Loads FaceFeatureExtractor parameters from a named tf weight map.
 * ("Weigth" typo is kept — the name is referenced across the codebase.)
 */
export function extractParamsFromWeigthMap(
  weightMap: tf.NamedTensorMap
): { params: FaceFeatureExtractorParams, paramMappings: ParamMapping[] } {

  // filled as a side effect by the loader factory
  const paramMappings: ParamMapping[] = []

  const {
    extractDenseBlock4Params
  } = loadParamsFactory(weightMap, paramMappings)

  const params = {
    dense0: extractDenseBlock4Params('dense0', true),
    dense1: extractDenseBlock4Params('dense1'),
    dense2: extractDenseBlock4Params('dense2'),
    dense3: extractDenseBlock4Params('dense3')
  }

  // release any tensors in the map that were not consumed above
  disposeUnusedWeightTensors(weightMap, paramMappings)

  return { params, paramMappings }
}
|
|
@ -0,0 +1,26 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { disposeUnusedWeightTensors, ParamMapping } from '../common';
|
||||||
|
import { loadParamsFactory } from './loadParamsFactory';
|
||||||
|
import { TinyFaceFeatureExtractorParams } from './types';
|
||||||
|
|
||||||
|
/**
 * Loads TinyFaceFeatureExtractor parameters from a named tf weight map.
 * ("Weigth" typo is kept — the name is referenced across the codebase.)
 */
export function extractParamsFromWeigthMapTiny(
  weightMap: tf.NamedTensorMap
): { params: TinyFaceFeatureExtractorParams, paramMappings: ParamMapping[] } {

  // filled as a side effect by the loader factory
  const paramMappings: ParamMapping[] = []

  const {
    extractDenseBlock3Params
  } = loadParamsFactory(weightMap, paramMappings)

  const params = {
    dense0: extractDenseBlock3Params('dense0', true),
    dense1: extractDenseBlock3Params('dense1'),
    dense2: extractDenseBlock3Params('dense2')
  }

  // release any tensors in the map that were not consumed above
  disposeUnusedWeightTensors(weightMap, paramMappings)

  return { params, paramMappings }
}
|
|
@ -0,0 +1,32 @@
|
||||||
|
import { extractWeightsFactory, ParamMapping } from '../common';
|
||||||
|
import { extractorsFactory } from './extractorsFactory';
|
||||||
|
import { TinyFaceFeatureExtractorParams } from './types';
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
 * Deserializes TinyFaceFeatureExtractor parameters from a flat weight
 * buffer, consuming it sequentially (block by block).
 * @throws if any weights remain unconsumed, i.e. the buffer size mismatches
 */
export function extractParamsTiny(weights: Float32Array): { params: TinyFaceFeatureExtractorParams, paramMappings: ParamMapping[] } {

  // filled as a side effect by the extractor factories
  const paramMappings: ParamMapping[] = []

  const {
    extractWeights,
    getRemainingWeights
  } = extractWeightsFactory(weights)

  const {
    extractDenseBlock3Params
  } = extractorsFactory(extractWeights, paramMappings)

  // channel counts double per block: 3 -> 32 -> 64 -> 128
  const dense0 = extractDenseBlock3Params(3, 32, 'dense0', true)
  const dense1 = extractDenseBlock3Params(32, 64, 'dense1')
  const dense2 = extractDenseBlock3Params(64, 128, 'dense2')

  // sanity check: the weight buffer must be consumed exactly
  if (getRemainingWeights().length !== 0) {
    throw new Error(`weights remaing after extract: ${getRemainingWeights().length}`)
  }

  return {
    paramMappings,
    params: { dense0, dense1, dense2 }
  }
}
|
|
@ -0,0 +1,38 @@
|
||||||
|
import {
|
||||||
|
extractConvParamsFactory,
|
||||||
|
extractSeparableConvParamsFactory,
|
||||||
|
ExtractWeightsFunction,
|
||||||
|
ParamMapping,
|
||||||
|
} from '../common';
|
||||||
|
import { DenseBlock3Params, DenseBlock4Params } from './types';
|
||||||
|
|
||||||
|
/**
 * Creates dense-block weight extractors bound to a sequential weight stream
 * (extractWeights) and a shared paramMappings list that records where each
 * parameter came from.
 */
export function extractorsFactory(extractWeights: ExtractWeightsFunction, paramMappings: ParamMapping[]) {

  const extractConvParams = extractConvParamsFactory(extractWeights, paramMappings)
  const extractSeparableConvParams = extractSeparableConvParamsFactory(extractWeights, paramMappings)

  // the network's first layer uses a plain 3x3 conv for conv0; all other
  // blocks use depthwise separable convs throughout
  function extractDenseBlock3Params(channelsIn: number, channelsOut: number, mappedPrefix: string, isFirstLayer: boolean = false): DenseBlock3Params {

    const conv0 = isFirstLayer
      ? extractConvParams(channelsIn, channelsOut, 3, `${mappedPrefix}/conv0`)
      : extractSeparableConvParams(channelsIn, channelsOut, `${mappedPrefix}/conv0`)
    const conv1 = extractSeparableConvParams(channelsOut, channelsOut, `${mappedPrefix}/conv1`)
    const conv2 = extractSeparableConvParams(channelsOut, channelsOut, `${mappedPrefix}/conv2`)

    return { conv0, conv1, conv2 }
  }

  // a 4-conv dense block is the 3-conv extraction plus one more separable conv
  function extractDenseBlock4Params(channelsIn: number, channelsOut: number, mappedPrefix: string, isFirstLayer: boolean = false): DenseBlock4Params {

    const { conv0, conv1, conv2 } = extractDenseBlock3Params(channelsIn, channelsOut, mappedPrefix, isFirstLayer)
    const conv3 = extractSeparableConvParams(channelsOut, channelsOut, `${mappedPrefix}/conv3`)

    return { conv0, conv1, conv2, conv3 }
  }

  return {
    extractDenseBlock3Params,
    extractDenseBlock4Params
  }

}
|
|
@ -0,0 +1,2 @@
|
||||||
|
export * from './FaceFeatureExtractor';
|
||||||
|
export * from './TinyFaceFeatureExtractor';
|
|
@ -0,0 +1,37 @@
|
||||||
|
import { extractWeightEntryFactory, loadSeparableConvParamsFactory, ParamMapping } from '../common';
|
||||||
|
import { loadConvParamsFactory } from '../common/loadConvParamsFactory';
|
||||||
|
import { DenseBlock3Params, DenseBlock4Params } from './types';
|
||||||
|
|
||||||
|
export function loadParamsFactory(weightMap: any, paramMappings: ParamMapping[]) {
|
||||||
|
|
||||||
|
const extractWeightEntry = extractWeightEntryFactory(weightMap, paramMappings)
|
||||||
|
|
||||||
|
const extractConvParams = loadConvParamsFactory(extractWeightEntry)
|
||||||
|
const extractSeparableConvParams = loadSeparableConvParamsFactory(extractWeightEntry)
|
||||||
|
|
||||||
|
function extractDenseBlock3Params(prefix: string, isFirstLayer: boolean = false): DenseBlock3Params {
|
||||||
|
const conv0 = isFirstLayer
|
||||||
|
? extractConvParams(`${prefix}/conv0`)
|
||||||
|
: extractSeparableConvParams(`${prefix}/conv0`)
|
||||||
|
const conv1 = extractSeparableConvParams(`${prefix}/conv1`)
|
||||||
|
const conv2 = extractSeparableConvParams(`${prefix}/conv2`)
|
||||||
|
|
||||||
|
return { conv0, conv1, conv2 }
|
||||||
|
}
|
||||||
|
|
||||||
|
function extractDenseBlock4Params(prefix: string, isFirstLayer: boolean = false): DenseBlock4Params {
|
||||||
|
const conv0 = isFirstLayer
|
||||||
|
? extractConvParams(`${prefix}/conv0`)
|
||||||
|
: extractSeparableConvParams(`${prefix}/conv0`)
|
||||||
|
const conv1 = extractSeparableConvParams(`${prefix}/conv1`)
|
||||||
|
const conv2 = extractSeparableConvParams(`${prefix}/conv2`)
|
||||||
|
const conv3 = extractSeparableConvParams(`${prefix}/conv3`)
|
||||||
|
|
||||||
|
return { conv0, conv1, conv2, conv3 }
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
extractDenseBlock3Params,
|
||||||
|
extractDenseBlock4Params
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,49 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { NetInput, TNetInput } from '..';
|
||||||
|
import { ConvParams, SeparableConvParams } from '../common';
|
||||||
|
import { NeuralNetwork } from '../NeuralNetwork';
|
||||||
|
|
||||||
|
/** Convolution filter together with batch-norm parameters. */
export type ConvWithBatchNormParams = BatchNormParams & {
  filter: tf.Tensor4D
}

/** Batch normalization statistics and learned scale/offset parameters. */
export type BatchNormParams = {
  mean: tf.Tensor1D
  variance: tf.Tensor1D
  scale: tf.Tensor1D
  offset: tf.Tensor1D
}

/** Depthwise-separable convolution with batch norm on both stages. */
export type SeparableConvWithBatchNormParams = {
  depthwise: ConvWithBatchNormParams
  pointwise: ConvWithBatchNormParams
}

/**
 * Parameters of a dense block with three conv layers. conv0 is a regular
 * convolution in a network's first block and a separable conv otherwise
 * (see extractorsFactory / loadParamsFactory).
 */
export type DenseBlock3Params = {
  conv0: SeparableConvParams | ConvParams
  conv1: SeparableConvParams
  conv2: SeparableConvParams
}

/** Dense block extended with a fourth separable conv layer. */
export type DenseBlock4Params = DenseBlock3Params & {
  conv3: SeparableConvParams
}

/** Parameters of the tiny feature extractor: three 3-layer dense blocks. */
export type TinyFaceFeatureExtractorParams = {
  dense0: DenseBlock3Params
  dense1: DenseBlock3Params
  dense2: DenseBlock3Params
}

/** Parameters of the full feature extractor: four 4-layer dense blocks. */
export type FaceFeatureExtractorParams = {
  dense0: DenseBlock4Params
  dense1: DenseBlock4Params
  dense2: DenseBlock4Params
  dense3: DenseBlock4Params
}

/**
 * Common interface of the face feature extractor networks: they map a
 * (preprocessed) input to a 4D tensor of bottleneck features.
 */
export interface IFaceFeatureExtractor<TNetParams extends TinyFaceFeatureExtractorParams | FaceFeatureExtractorParams> extends NeuralNetwork<TNetParams> {
  forwardInput(input: NetInput): tf.Tensor4D
  forward(input: TNetInput): Promise<tf.Tensor4D>
}
|
|
@ -0,0 +1,18 @@
|
||||||
|
import { FaceFeatureExtractor } from '../faceFeatureExtractor/FaceFeatureExtractor';
|
||||||
|
import { FaceFeatureExtractorParams } from '../faceFeatureExtractor/types';
|
||||||
|
import { FaceLandmark68NetBase } from './FaceLandmark68NetBase';
|
||||||
|
|
||||||
|
export class FaceLandmark68Net extends FaceLandmark68NetBase<FaceFeatureExtractorParams> {
|
||||||
|
|
||||||
|
constructor(faceFeatureExtractor: FaceFeatureExtractor = new FaceFeatureExtractor()) {
|
||||||
|
super('FaceLandmark68Net', faceFeatureExtractor)
|
||||||
|
}
|
||||||
|
|
||||||
|
protected getDefaultModelName(): string {
|
||||||
|
return 'face_landmark_68_model'
|
||||||
|
}
|
||||||
|
|
||||||
|
protected getClassifierChannelsIn(): number {
|
||||||
|
return 256
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,107 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { IDimensions, Point } from '../classes';
|
||||||
|
import { FaceLandmarks68 } from '../classes/FaceLandmarks68';
|
||||||
|
import { NetInput, TNetInput, toNetInput } from '../dom';
|
||||||
|
import { FaceFeatureExtractorParams, TinyFaceFeatureExtractorParams } from '../faceFeatureExtractor/types';
|
||||||
|
import { FaceProcessor } from '../faceProcessor/FaceProcessor';
|
||||||
|
import { isEven } from '../utils';
|
||||||
|
|
||||||
|
/**
 * Common base of the 68-point face landmark networks (FaceLandmark68Net and
 * FaceLandmark68TinyNet). Adds landmark-specific post-processing on top of
 * FaceProcessor, which runs the feature extractor + fc classifier.
 */
export abstract class FaceLandmark68NetBase<
  TExtractorParams extends FaceFeatureExtractorParams | TinyFaceFeatureExtractorParams
>
  extends FaceProcessor<TExtractorParams> {

  /**
   * Maps the raw network output back into the coordinate space of the
   * original input images: scale to network-input pixels, undo the centering
   * padding of the shorter side, then renormalize by the scaled dimensions.
   *
   * @param output Landmark tensor of shape [batchSize, 136] (68 x/y pairs,
   *   interleaved as x0, y0, x1, y1, ...) — NOTE(review): assumed normalized
   *   to [0, 1] relative to the square network input, since it is multiplied
   *   by `inputSize` below; confirm against the model.
   * @param inputSize Side length of the square network input.
   * @param originalDimensions Width/height of each original input image.
   * @returns Tensor of shape [batchSize, 136], coordinates normalized to the
   *   (rescaled) original image dimensions.
   */
  public postProcess(output: tf.Tensor2D, inputSize: number, originalDimensions: IDimensions[]): tf.Tensor2D {

    // dimensions of each image after scaling its longer side to inputSize
    const inputDimensions = originalDimensions.map(({ width, height }) => {
      const scale = inputSize / Math.max(height, width)
      return {
        width: width * scale,
        height: height * scale
      }
    })

    const batchSize = inputDimensions.length

    return tf.tidy(() => {
      // builds a 136-element tensor of interleaved (fillX, fillY) pairs,
      // matching the x0, y0, x1, y1, ... layout of the network output
      const createInterleavedTensor = (fillX: number, fillY: number) =>
        tf.stack([
          tf.fill([68], fillX),
          tf.fill([68], fillY)
        ], 1).as2D(1, 136).as1D()

      // padding that was added to the shorter side to square the input
      // (0 for the longer side)
      const getPadding = (batchIdx: number, cond: (w: number, h: number) => boolean): number => {
        const { width, height } = inputDimensions[batchIdx]
        return cond(width, height) ? Math.abs(width - height) / 2 : 0
      }
      const getPaddingX = (batchIdx: number) => getPadding(batchIdx, (w, h) => w < h)
      const getPaddingY = (batchIdx: number) => getPadding(batchIdx, (w, h) => h < w)

      // scale to pixels, subtract the per-image padding offsets, then divide
      // by the scaled image dimensions
      const landmarkTensors = output
        .mul(tf.fill([batchSize, 136], inputSize))
        .sub(tf.stack(Array.from(Array(batchSize), (_, batchIdx) =>
          createInterleavedTensor(
            getPaddingX(batchIdx),
            getPaddingY(batchIdx)
          )
        )))
        .div(tf.stack(Array.from(Array(batchSize), (_, batchIdx) =>
          createInterleavedTensor(
            inputDimensions[batchIdx].width,
            inputDimensions[batchIdx].height
          )
        )))

      return landmarkTensors as tf.Tensor2D
    })
  }

  /**
   * Runs the network on a preprocessed NetInput and post-processes the
   * result. Intermediate tensors are disposed via tf.tidy.
   */
  public forwardInput(input: NetInput): tf.Tensor2D {
    return tf.tidy(() => {
      const out = this.runNet(input)
      return this.postProcess(
        out,
        input.inputSize as number,
        input.inputDimensions.map(([height, width]) => ({ height, width }))
      )
    })
  }

  /** Converts the input to a NetInput and forwards it through the network. */
  public async forward(input: TNetInput): Promise<tf.Tensor2D> {
    return this.forwardInput(await toNetInput(input))
  }

  /**
   * Detects the 68 face landmarks for the given input.
   *
   * @returns A single FaceLandmarks68 for a non-batch input, otherwise an
   *   array with one FaceLandmarks68 per image of the batch.
   */
  public async detectLandmarks(input: TNetInput): Promise<FaceLandmarks68 | FaceLandmarks68[]> {
    const netInput = await toNetInput(input)
    const landmarkTensors = tf.tidy(
      () => tf.unstack(this.forwardInput(netInput))
    )

    const landmarksForBatch = await Promise.all(landmarkTensors.map(
      async (landmarkTensor, batchIdx) => {
        // even indices hold x coordinates, odd indices hold y coordinates
        const landmarksArray = Array.from(await landmarkTensor.data())
        const xCoords = landmarksArray.filter((_, i) => isEven(i))
        const yCoords = landmarksArray.filter((_, i) => !isEven(i))

        return new FaceLandmarks68(
          Array(68).fill(0).map((_, i) => new Point(xCoords[i], yCoords[i])),
          {
            height: netInput.getInputHeight(batchIdx),
            width : netInput.getInputWidth(batchIdx),
          }
        )
      }
    ))

    // the unstacked per-image tensors escaped tf.tidy above, so dispose them
    // explicitly now that their data has been read
    landmarkTensors.forEach(t => t.dispose())

    return netInput.isBatchInput
      ? landmarksForBatch
      : landmarksForBatch[0]
  }

  /** 136 output channels: 68 landmark points with an x and a y coordinate. */
  protected getClassifierChannelsOut(): number {
    return 136
  }
}
|
|
@ -0,0 +1,18 @@
|
||||||
|
import { TinyFaceFeatureExtractor } from '../faceFeatureExtractor/TinyFaceFeatureExtractor';
|
||||||
|
import { TinyFaceFeatureExtractorParams } from '../faceFeatureExtractor/types';
|
||||||
|
import { FaceLandmark68NetBase } from './FaceLandmark68NetBase';
|
||||||
|
|
||||||
|
export class FaceLandmark68TinyNet extends FaceLandmark68NetBase<TinyFaceFeatureExtractorParams> {
|
||||||
|
|
||||||
|
constructor(faceFeatureExtractor: TinyFaceFeatureExtractor = new TinyFaceFeatureExtractor()) {
|
||||||
|
super('FaceLandmark68TinyNet', faceFeatureExtractor)
|
||||||
|
}
|
||||||
|
|
||||||
|
protected getDefaultModelName(): string {
|
||||||
|
return 'face_landmark_68_tiny_model'
|
||||||
|
}
|
||||||
|
|
||||||
|
protected getClassifierChannelsIn(): number {
|
||||||
|
return 128
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,6 @@
|
||||||
|
import { FaceLandmark68Net } from './FaceLandmark68Net';
|
||||||
|
|
||||||
|
export * from './FaceLandmark68Net';
|
||||||
|
export * from './FaceLandmark68TinyNet';
|
||||||
|
|
||||||
|
// alias — presumably kept for backward compatibility with the original
// face-api.js API surface; TODO(review) confirm before removing
export class FaceLandmarkNet extends FaceLandmark68Net {}
|
|
@ -0,0 +1,88 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { fullyConnectedLayer } from '../common/fullyConnectedLayer';
|
||||||
|
import { NetInput } from '../dom';
|
||||||
|
import {
|
||||||
|
FaceFeatureExtractorParams,
|
||||||
|
IFaceFeatureExtractor,
|
||||||
|
TinyFaceFeatureExtractorParams,
|
||||||
|
} from '../faceFeatureExtractor/types';
|
||||||
|
import { NeuralNetwork } from '../NeuralNetwork';
|
||||||
|
import { extractParams } from './extractParams';
|
||||||
|
import { extractParamsFromWeigthMap } from './extractParamsFromWeigthMap';
|
||||||
|
import { NetParams } from './types';
|
||||||
|
import { seperateWeightMaps } from './util';
|
||||||
|
|
||||||
|
/**
 * Base class for networks that run a fully connected classifier on top of a
 * face feature extractor (e.g. the face landmark nets).
 *
 * The weights are split in two parts: the feature extractor's own weights and
 * the fc classifier weights; in a weight map the classifier entries are the
 * ones whose keys start with 'fc' (see seperateWeightMaps).
 */
export abstract class FaceProcessor<
  TExtractorParams extends FaceFeatureExtractorParams | TinyFaceFeatureExtractorParams
>
  extends NeuralNetwork<NetParams> {

  // backbone network producing the bottleneck features for the classifier
  protected _faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>

  constructor(_name: string, faceFeatureExtractor: IFaceFeatureExtractor<TExtractorParams>) {
    super(_name)
    this._faceFeatureExtractor = faceFeatureExtractor
  }

  public get faceFeatureExtractor(): IFaceFeatureExtractor<TExtractorParams> {
    return this._faceFeatureExtractor
  }

  // name identifying the default model weights (resolution happens in the
  // base class / loader, not visible from this file)
  protected abstract getDefaultModelName(): string
  // number of channels the fc classifier receives from the extractor
  protected abstract getClassifierChannelsIn(): number
  // number of output channels of the fc classifier
  protected abstract getClassifierChannelsOut(): number

  /**
   * Runs the feature extractor (unless precomputed bottleneck features are
   * passed in directly) followed by the fc classifier.
   *
   * @param input A preprocessed NetInput, or a 4D tensor of precomputed
   *   bottleneck features.
   * @throws If the model parameters have not been loaded yet.
   */
  public runNet(input: NetInput | tf.Tensor4D): tf.Tensor2D {

    const { params } = this

    if (!params) {
      throw new Error(`${this._name} - load model before inference`)
    }

    return tf.tidy(() => {
      const bottleneckFeatures = input instanceof NetInput
        ? this.faceFeatureExtractor.forwardInput(input)
        : input
      // flatten the features to [batchSize, numFeatures] for the fc layer
      return fullyConnectedLayer(bottleneckFeatures.as2D(bottleneckFeatures.shape[0], -1), params.fc)
    })
  }

  /** Disposes the feature extractor's tensors as well as the classifier's. */
  public dispose(throwOnRedispose: boolean = true) {
    this.faceFeatureExtractor.dispose(throwOnRedispose)
    super.dispose(throwOnRedispose)
  }

  /** Loads only the fc classifier parameters from a flat weight array. */
  public loadClassifierParams(weights: Float32Array) {
    const { params, paramMappings } = this.extractClassifierParams(weights)
    this._params = params
    this._paramMappings = paramMappings
  }

  /** Extracts the fc classifier parameters from a flat weight array. */
  public extractClassifierParams(weights: Float32Array) {
    return extractParams(weights, this.getClassifierChannelsIn(), this.getClassifierChannelsOut())
  }

  protected extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap) {

    // 'fc'-prefixed entries belong to the classifier, all others to the
    // feature extractor
    const { featureExtractorMap, classifierMap } = seperateWeightMaps(weightMap)

    this.faceFeatureExtractor.loadFromWeightMap(featureExtractorMap)

    return extractParamsFromWeigthMap(classifierMap)
  }

  protected extractParams(weights: Float32Array) {

    // the classifier weights ((cIn * cOut) weights + cOut biases) sit at the
    // tail of the flat weight array, after the extractor weights
    const cIn = this.getClassifierChannelsIn()
    const cOut = this.getClassifierChannelsOut()
    const classifierWeightSize = (cOut * cIn ) + cOut

    const featureExtractorWeights = weights.slice(0, weights.length - classifierWeightSize)
    const classifierWeights = weights.slice(weights.length - classifierWeightSize)

    this.faceFeatureExtractor.extractWeights(featureExtractorWeights)
    return this.extractClassifierParams(classifierWeights)
  }
}
|
|
@ -0,0 +1,25 @@
|
||||||
|
import { extractFCParamsFactory, extractWeightsFactory, ParamMapping } from '../common';
|
||||||
|
import { NetParams } from './types';
|
||||||
|
|
||||||
|
export function extractParams(weights: Float32Array, channelsIn: number, channelsOut: number): { params: NetParams, paramMappings: ParamMapping[] } {
|
||||||
|
|
||||||
|
const paramMappings: ParamMapping[] = []
|
||||||
|
|
||||||
|
const {
|
||||||
|
extractWeights,
|
||||||
|
getRemainingWeights
|
||||||
|
} = extractWeightsFactory(weights)
|
||||||
|
|
||||||
|
const extractFCParams = extractFCParamsFactory(extractWeights, paramMappings)
|
||||||
|
|
||||||
|
const fc = extractFCParams(channelsIn, channelsOut, 'fc')
|
||||||
|
|
||||||
|
if (getRemainingWeights().length !== 0) {
|
||||||
|
throw new Error(`weights remaing after extract: ${getRemainingWeights().length}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
paramMappings,
|
||||||
|
params: { fc }
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,27 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
import { disposeUnusedWeightTensors, extractWeightEntryFactory, FCParams, ParamMapping } from '../common';
|
||||||
|
import { NetParams } from './types';
|
||||||
|
|
||||||
|
export function extractParamsFromWeigthMap(
|
||||||
|
weightMap: tf.NamedTensorMap
|
||||||
|
): { params: NetParams, paramMappings: ParamMapping[] } {
|
||||||
|
|
||||||
|
const paramMappings: ParamMapping[] = []
|
||||||
|
|
||||||
|
const extractWeightEntry = extractWeightEntryFactory(weightMap, paramMappings)
|
||||||
|
|
||||||
|
function extractFcParams(prefix: string): FCParams {
|
||||||
|
const weights = extractWeightEntry<tf.Tensor2D>(`${prefix}/weights`, 2)
|
||||||
|
const bias = extractWeightEntry<tf.Tensor1D>(`${prefix}/bias`, 1)
|
||||||
|
return { weights, bias }
|
||||||
|
}
|
||||||
|
|
||||||
|
const params = {
|
||||||
|
fc: extractFcParams('fc')
|
||||||
|
}
|
||||||
|
|
||||||
|
disposeUnusedWeightTensors(weightMap, paramMappings)
|
||||||
|
|
||||||
|
return { params, paramMappings }
|
||||||
|
}
|
|
@ -0,0 +1 @@
|
||||||
|
export * from './FaceProcessor';
|
|
@ -0,0 +1,6 @@
|
||||||
|
import { FCParams } from '../common';
|
||||||
|
|
||||||
|
/** Parameters of the fc classifier head run by FaceProcessor. */
export type NetParams = {
  fc: FCParams
}
|
||||||
|
|
|
@ -0,0 +1,15 @@
|
||||||
|
import * as tf from '@tensorflow/tfjs-core';
|
||||||
|
|
||||||
|
export function seperateWeightMaps(weightMap: tf.NamedTensorMap) {
|
||||||
|
|
||||||
|
const featureExtractorMap: tf.NamedTensorMap = {}
|
||||||
|
const classifierMap: tf.NamedTensorMap = {}
|
||||||
|
|
||||||
|
Object.keys(weightMap).forEach(key => {
|
||||||
|
const map = key.startsWith('fc') ? classifierMap : featureExtractorMap
|
||||||
|
map[key] = weightMap[key]
|
||||||
|
})
|
||||||
|
|
||||||
|
return { featureExtractorMap, classifierMap }
|
||||||
|
|
||||||
|
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue