mirror of https://github.com/vladmandic/human

commit fc5f90b639 (parent c10c919f1a)
full eslint rule rewrite

.eslintrc.json: 223 changed lines
@@ -1,93 +1,146 @@
 {
   "globals": {},
-  "env": {
-    "browser": true,
-    "commonjs": true,
-    "node": true,
-    "es2021": true
-  },
-  "parser": "@typescript-eslint/parser",
-  "parserOptions": {
-    "ecmaVersion": 2021
-  },
-  "plugins": [
-    "@typescript-eslint",
-    "html"
-  ],
-  "extends": [
-    "airbnb-base",
-    "eslint:recommended",
-    "plugin:@typescript-eslint/eslint-recommended",
-    "plugin:@typescript-eslint/recommended",
-    "plugin:import/errors",
-    "plugin:import/warnings",
-    "plugin:json/recommended-with-comments",
-    "plugin:node/recommended",
-    "plugin:promise/recommended"
-  ],
-  "ignorePatterns": [
-    "assets",
-    "demo/helpers/*.js",
-    "demo/typescript/*.js",
-    "demo/faceid/*.js",
-    "dist",
-    "media",
-    "models",
-    "node_modules",
-    "types/human.d.ts"
-  ],
-  "rules": {
-    "@typescript-eslint/ban-ts-comment": "off",
-    "@typescript-eslint/explicit-module-boundary-types": "off",
-    "@typescript-eslint/no-shadow": "error",
-    "@typescript-eslint/no-var-requires": "off",
-    "@typescript-eslint/prefer-as-const": "off",
-    "@typescript-eslint/triple-slash-reference": "off",
-    "@typescript-eslint/no-inferrable-types": "off",
-    "@typescript-eslint/no-empty-interface": ["error", { "allowSingleExtends": true }],
-    "camelcase": "off",
-    "class-methods-use-this": "off",
-    "dot-notation": "off",
-    "func-names": "off",
-    "guard-for-in": "off",
-    "import/extensions": "off",
-    "import/named": "off",
-    "import/no-extraneous-dependencies": "off",
-    "import/no-named-as-default": "off",
-    "import/no-unresolved": "off",
-    "import/prefer-default-export": "off",
-    "lines-between-class-members": "off",
-    "max-len": [1, 275, 3],
-    "newline-per-chained-call": "off",
-    "no-async-promise-executor": "off",
-    "no-await-in-loop": "off",
-    "no-bitwise": "off",
-    "no-case-declarations":"off",
-    "no-continue": "off",
-    "no-else-return": "off",
-    "no-lonely-if": "off",
-    "no-loop-func": "off",
-    "no-mixed-operators": "off",
-    "no-param-reassign":"off",
-    "no-plusplus": "off",
-    "no-process-exit": "off",
-    "no-regex-spaces": "off",
-    "no-restricted-globals": "off",
-    "no-restricted-syntax": "off",
-    "no-return-assign": "off",
-    "no-shadow": "off",
-    "no-underscore-dangle": "off",
-    "node/no-missing-import": ["error", { "tryExtensions": [".js", ".json", ".ts"] }],
-    "node/no-unpublished-import": "off",
-    "node/no-unpublished-require": "off",
-    "node/no-unsupported-features/es-syntax": "off",
-    "node/shebang": "off",
-    "object-curly-newline": "off",
-    "prefer-destructuring": "off",
-    "prefer-template":"off",
-    "promise/always-return": "off",
-    "promise/catch-or-return": "off",
-    "promise/no-nesting": "off",
-    "radix": "off"
-  }
+  "overrides": [
+    {
+      "files": ["**/*.ts"],
+      "env": {
+        "browser": true,
+        "commonjs": false,
+        "node": false,
+        "es2021": true
+      },
+      "parser": "@typescript-eslint/parser",
+      "parserOptions": {
+        "ecmaVersion": "latest",
+        "project": ["./tsconfig.json"]
+      },
+      "plugins": [
+        "@typescript-eslint"
+      ],
+      "extends": [
+        "airbnb-base",
+        "eslint:recommended",
+        "plugin:@typescript-eslint/eslint-recommended",
+        "plugin:@typescript-eslint/recommended",
+        "plugin:import/recommended",
+        "plugin:promise/recommended"
+      ],
+      "rules": {
+        "@typescript-eslint/ban-ts-comment":"off",
+        "@typescript-eslint/no-empty-interface":"off",
+        "@typescript-eslint/no-inferrable-types":"off",
+        "dot-notation":"off",
+        "guard-for-in":"off",
+        "import/extensions": ["off", "always"],
+        "import/no-unresolved":"off",
+        "import/prefer-default-export":"off",
+        "lines-between-class-members":"off",
+        "max-len": [1, 275, 3],
+        "no-async-promise-executor":"off",
+        "no-await-in-loop":"off",
+        "no-bitwise":"off",
+        "no-continue":"off",
+        "no-lonely-if":"off",
+        "no-mixed-operators":"off",
+        "no-param-reassign":"off",
+        "no-plusplus":"off",
+        "no-regex-spaces":"off",
+        "no-restricted-syntax":"off",
+        "no-return-assign":"off",
+        "object-curly-newline":"off",
+        "prefer-destructuring":"off",
+        "prefer-template":"off",
+        "radix":"off"
+      }
+    },
+    {
+      "files": ["**/*.js"],
+      "env": {
+        "browser": true,
+        "commonjs": true,
+        "node": true,
+        "es2021": true
+      },
+      "parserOptions": {
+        "sourceType": "module",
+        "ecmaVersion": "latest"
+      },
+      "plugins": [
+      ],
+      "extends": [
+        "airbnb-base",
+        "eslint:recommended",
+        "plugin:node/recommended",
+        "plugin:promise/recommended"
+      ],
+      "rules": {
+        "dot-notation":"off",
+        "import/extensions": ["error", "always"],
+        "import/no-extraneous-dependencies":"off",
+        "max-len": [1, 275, 3],
+        "no-await-in-loop":"off",
+        "no-bitwise":"off",
+        "no-continue":"off",
+        "no-mixed-operators":"off",
+        "no-param-reassign":"off",
+        "no-plusplus":"off",
+        "no-regex-spaces":"off",
+        "no-restricted-syntax":"off",
+        "no-return-assign":"off",
+        "node/no-unsupported-features/es-syntax":"off",
+        "object-curly-newline":"off",
+        "prefer-destructuring":"off",
+        "prefer-template":"off",
+        "radix":"off"
+      }
+    },
+    {
+      "files": ["**/*.json"],
+      "env": {
+        "browser": false,
+        "commonjs": false,
+        "node": false,
+        "es2021": false
+      },
+      "parserOptions": {
+        "ecmaVersion": "latest"
+      },
+      "plugins": [
+        "json"
+      ],
+      "extends": [
+        "plugin:json/recommended"
+      ]
+    },
+    {
+      "files": ["**/*.html"],
+      "env": {
+        "browser": true,
+        "commonjs": false,
+        "node": false,
+        "es2021": false
+      },
+      "parserOptions": {
+        "sourceType": "module",
+        "ecmaVersion": "latest"
+      },
+      "parser": "@html-eslint/parser",
+      "extends": ["plugin:@html-eslint/recommended"],
+      "plugins": [
+        "html", "@html-eslint"
+      ],
+      "rules": {
+        "@html-eslint/indent": ["error", 2],
+        "@html-eslint/element-newline":"off"
+      }
+    }
+  ],
+  "ignorePatterns": [
+    "node_modules",
+    "assets",
+    "dist",
+    "demo/helpers/*.js",
+    "demo/typescript/*.js",
+    "demo/faceid/*.js"
+  ]
 }
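
Note: the rewrite above moves every setting into ESLint's `overrides` blocks, which scope a parser, environment, and rule set to a file glob instead of applying one global config to every file type. A minimal sketch of that mechanism (the globs and parsers mirror the config above; the single rule shown is a hypothetical placeholder):

    {
      "overrides": [
        { "files": ["**/*.ts"], "parser": "@typescript-eslint/parser", "rules": { "max-len": "off" } },
        { "files": ["**/*.html"], "parser": "@html-eslint/parser" }
      ]
    }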
@@ -9,11 +9,12 @@
 
 ## Changelog
 
+### **HEAD -> main** 2022/08/20 mandic00@live.com
+
 ### **2.9.4** 2022/08/20 mandic00@live.com
 
+- add browser test
-
-### **origin/main** 2022/08/19 mandic00@live.com
 
 - add tensorflow library detection
 - fix wasm detection
 - enumerate additional models
build.js: 14 changed lines

@@ -1,10 +1,10 @@
 const fs = require('fs');
 const path = require('path');
-const log = require('@vladmandic/pilogger');
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require
-const Build = require('@vladmandic/build').Build;
+const Build = require('@vladmandic/build').Build; // eslint-disable-line node/no-unpublished-require
-const APIExtractor = require('@microsoft/api-extractor');
+const APIExtractor = require('@microsoft/api-extractor'); // eslint-disable-line node/no-unpublished-require
-const tf = require('@tensorflow/tfjs-node');
+const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
-const package = require('./package.json');
+const packageJSON = require('./package.json');
 
 const logFile = 'test/build.log';
 const modelsOut = 'models/models.json';

@@ -19,7 +19,7 @@ const modelsFolders = [
   '../nanodet/models',
 ];
 
-const apiExtractorIgnoreList = [ // eslint-disable-line @typescript-eslint/no-unused-vars
+const apiExtractorIgnoreList = [ // eslint-disable-line no-unused-vars
   'ae-missing-release-tag',
   'tsdoc-param-tag-missing-hyphen',
   'tsdoc-escape-right-brace',

@@ -73,7 +73,7 @@ async function analyzeModels() {
 
 async function main() {
   log.logFile(logFile);
-  log.data('Build', { name: package.name, version: package.version });
+  log.data('Build', { name: packageJSON.name, version: packageJSON.version });
   // run production build
   const build = new Build();
   await build.run('production');
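
A recurring mechanical change in the hunks above and below: `// eslint-disable-next-line <rule>` on its own line is replaced with an inline `// eslint-disable-line <rule>`. Both forms are standard ESLint directives that suppress the same rule; they differ only in which line they exempt. A minimal sketch:

    // eslint-disable-next-line no-console  (exempts the following line)
    console.log('first');
    console.log('second'); // eslint-disable-line no-console  (exempts its own line)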
File diff suppressed because one or more lines are too long
@@ -7,7 +7,7 @@
  * @license MIT
  */
 
-import { Human, TensorLike, FaceResult } from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human
+import * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human
 import * as indexDb from './indexdb'; // methods to deal with indexdb
 
 const humanConfig = { // user configuration for human, used to fine-tune behavior

@@ -58,7 +58,7 @@ const ok = { // must meet all rules
   elapsedMs: 0, // total time while waiting for valid face
 };
 const allOk = () => ok.faceCount && ok.faceSize && ok.blinkDetected && ok.facingCenter && ok.lookingCenter && ok.faceConfidence && ok.antispoofCheck && ok.livenessCheck;
-const current: { face: FaceResult | null, record: indexDb.FaceRecord | null } = { face: null, record: null }; // current face record and matched database record
+const current: { face: H.FaceResult | null, record: indexDb.FaceRecord | null } = { face: null, record: null }; // current face record and matched database record
 
 const blink = { // internal timers for blink start/end/duration
   start: 0,

@@ -67,9 +67,9 @@ const blink = { // internal timers for blink start/end/duration
 };
 
 // let db: Array<{ name: string, source: string, embedding: number[] }> = []; // holds loaded face descriptor database
-const human = new Human(humanConfig); // create instance of human with overrides from user configuration
+const human = new H.Human(humanConfig); // create instance of human with overrides from user configuration
 
-human.env['perfadd'] = false; // is performance data showing instant or total values
+human.env.perfadd = false; // is performance data showing instant or total values
 human.draw.options.font = 'small-caps 18px "Lato"'; // set font used to draw labels when using draw methods
 human.draw.options.lineHeight = 20;
 

@@ -92,8 +92,7 @@ let startTime = 0;
 
 const log = (...msg) => { // helper method to output messages
   dom.log.innerText += msg.join(' ') + '\n';
-  // eslint-disable-next-line no-console
-  console.log(...msg);
+  console.log(...msg); // eslint-disable-line no-console
 };
 const printFPS = (msg) => dom.fps.innerText = msg; // print status element
 

@@ -126,7 +125,7 @@ async function detectionLoop() { // main detection loop
   }
 }
 
-async function validationLoop(): Promise<FaceResult> { // main screen refresh loop
+async function validationLoop(): Promise<H.FaceResult> { // main screen refresh loop
   const interpolated = await human.next(human.result); // smoothen result using last-known results
   await human.draw.canvas(dom.video, dom.canvas); // draw canvas to screen
   await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.

@@ -136,7 +135,7 @@ async function validationLoop(): Promise<FaceResult> { // main screen refresh lo
   printFPS(`fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect | ${fps.draw.toFixed(1).padStart(5, ' ')} draw`); // write status
   ok.faceCount = human.result.face.length === 1; // must be exactly detected face
   if (ok.faceCount) { // skip the rest if no face
-    const gestures: string[] = Object.values(human.result.gesture).map((gesture) => gesture.gesture); // flatten all gestures
+    const gestures: string[] = Object.values(human.result.gesture).map((gesture) => (gesture as H.GestureResult).gesture); // flatten all gestures
     if (gestures.includes('blink left eye') || gestures.includes('blink right eye')) blink.start = human.now(); // blink starts when eyes get closed
     if (blink.start > 0 && !gestures.includes('blink left eye') && !gestures.includes('blink right eye')) blink.end = human.now(); // if blink started how long until eyes are back open
     ok.blinkDetected = ok.blinkDetected || (Math.abs(blink.end - blink.start) > options.blinkMin && Math.abs(blink.end - blink.start) < options.blinkMax);

@@ -169,15 +168,15 @@ async function validationLoop(): Promise<FaceResult> { // main screen refresh lo
   if (ok.elapsedMs > options.maxTime) { // give up
     dom.video.pause();
     return human.result.face[0];
-  } else { // run again
-    ok.elapsedMs = Math.trunc(human.now() - startTime);
-    return new Promise((resolve) => {
-      setTimeout(async () => {
-        const res = await validationLoop(); // run validation loop until conditions are met
-        if (res) resolve(human.result.face[0]); // recursive promise resolve
-      }, 30); // use to slow down refresh from max refresh rate to target of 30 fps
-    });
   }
+  // run again
+  ok.elapsedMs = Math.trunc(human.now() - startTime);
+  return new Promise((resolve) => {
+    setTimeout(async () => {
+      const res = await validationLoop(); // run validation loop until conditions are met
+      if (res) resolve(human.result.face[0]); // recursive promise resolve
+    }, 30); // use to slow down refresh from max refresh rate to target of 30 fps
+  });
 }
 
 async function saveRecords() {

@@ -201,9 +200,8 @@ async function deleteRecord() {
 async function detectFace() {
   dom.canvas.getContext('2d')?.clearRect(0, 0, options.minSize, options.minSize);
   if (!current.face || !current.face.tensor || !current.face.embedding) return false;
-  // eslint-disable-next-line no-console
-  console.log('face record:', current.face);
-  human.tf.browser.toPixels(current.face.tensor as unknown as TensorLike, dom.canvas);
+  console.log('face record:', current.face); // eslint-disable-line no-console
+  human.tf.browser.toPixels(current.face.tensor as unknown as H.TensorLike, dom.canvas);
   if (await indexDb.count() === 0) {
     log('face database is empty');
     document.body.style.background = 'black';

@@ -241,8 +239,8 @@ async function main() { // main entry point
   await detectionLoop(); // start detection loop
   startTime = human.now();
   current.face = await validationLoop(); // start validation loop
-  dom.canvas.width = current.face?.tensor?.shape[1] || options.minSize;
-  dom.canvas.height = current.face?.tensor?.shape[0] || options.minSize;
+  dom.canvas.width = current.face.tensor?.shape[1] || options.minSize;
+  dom.canvas.height = current.face.tensor?.shape[0] || options.minSize;
   dom.source.width = dom.canvas.width;
   dom.source.height = dom.canvas.height;
   dom.canvas.style.width = '';

@@ -253,14 +251,13 @@ async function main() { // main entry point
   if (!allOk()) { // is all criteria met?
     log('did not find valid face');
     return false;
-  } else {
-    return detectFace();
   }
+  return detectFace();
 }
 
 async function init() {
   log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);
-  log('face embedding model:', humanConfig.face['description']?.enabled ? 'faceres' : '', humanConfig.face['mobilefacenet']?.enabled ? 'mobilefacenet' : '', humanConfig.face['insightface']?.enabled ? 'insightface' : '');
+  log('face embedding model:', humanConfig.face.description.enabled ? 'faceres' : '', humanConfig.face['mobilefacenet']?.enabled ? 'mobilefacenet' : '', humanConfig.face['insightface']?.enabled ? 'insightface' : '');
   log('options:', JSON.stringify(options).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ' '));
   printFPS('loading...');
   log('known face records:', await indexDb.count());
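
The validation-loop hunk above replaces an `else` branch that followed a `return` with straight-line code, the early-return shape that airbnb's `no-else-return` rule prefers. A minimal sketch with hypothetical names:

    // before: else after return (flagged by no-else-return)
    if (done) { return result; } else { return retry(); }
    // after: early return, no else branch needed
    if (done) return result;
    return retry();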
@@ -3,10 +3,9 @@ let db: IDBDatabase; // instance of indexdb
 const database = 'human';
 const table = 'person';
 
-export type FaceRecord = { id: number, name: string, descriptor: number[], image: ImageData };
+export interface FaceRecord { id: number, name: string, descriptor: number[], image: ImageData }
 
-// eslint-disable-next-line no-console
-const log = (...msg) => console.log('indexdb', ...msg);
+const log = (...msg) => console.log('indexdb', ...msg); // eslint-disable-line no-console
 
 export async function open() {
   if (db) return true;

@@ -19,7 +18,7 @@ export async function open() {
       db.createObjectStore(table, { keyPath: 'id', autoIncrement: true });
     };
     request.onsuccess = (evt) => { // open
-      db = (evt.target as IDBOpenDBRequest).result as IDBDatabase;
+      db = (evt.target as IDBOpenDBRequest).result;
       log('open:', db);
       resolve(true);
     };

@@ -27,7 +26,7 @@ export async function open() {
 }
 
 export async function load(): Promise<FaceRecord[]> {
-  const faceDB: Array<FaceRecord> = [];
+  const faceDB: FaceRecord[] = [];
   if (!db) await open(); // open or create if not already done
   return new Promise((resolve) => {
     const cursor: IDBRequest = db.transaction([table], 'readwrite').objectStore(table).openCursor(null, 'next');
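
The `FaceRecord` change above swaps a type alias for an interface declaration; for a plain object shape like this the two forms are interchangeable to consumers, so the edit reads as stylistic (lint-driven) rather than behavioral. A sketch of the equivalence, with hypothetical names:

    type PersonT = { id: number, name: string };     // type alias form
    interface PersonI { id: number, name: string }   // interface form; same shape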
@@ -1,4 +1,3 @@
-// @ts-nocheck
 /**
  * Human demo for browsers
  *

@@ -6,7 +5,7 @@
  */
 
 /** @type {Human} */
-import Human from '../../dist/human.esm.js';
+import { Human } from '../../dist/human.esm.js';
 
 const userConfig = {
   backend: 'humangl',

@@ -46,8 +45,7 @@ const minScore = 0.4;
 function log(...msg) {
   const dt = new Date();
   const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;
-  // eslint-disable-next-line no-console
-  console.log(ts, ...msg);
+  console.log(ts, ...msg); // eslint-disable-line no-console
 }
 
 function title(msg) {

@@ -181,10 +179,12 @@ async function AddImageElement(index, image, length) {
     const img = new Image(128, 128);
     img.onload = () => { // must wait until image is loaded
       document.getElementById('images').appendChild(img); // and finally we can add it
-      human.detect(img, userConfig).then((res) => {
-        AddFaceCanvas(index, res, image); // then wait until image is analyzed
-        resolve(true);
-      });
+      human.detect(img, userConfig)
+        .then((res) => { // eslint-disable-line promise/always-return
+          AddFaceCanvas(index, res, image); // then wait until image is analyzed
+          resolve(true);
+        })
+        .catch(() => log('human detect error'));
     };
     img.onerror = () => {
       log('Add image error:', index + 1, image);
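
The `AddImageElement` hunk above converts a bare `.then()` into a chain terminated by `.catch()`, which is what `plugin:promise/recommended` asks for: `promise/catch-or-return` wants every chain to handle rejection, and a `.then` callback that intentionally returns nothing silences `promise/always-return` inline. A minimal sketch of the same pattern (the `render` consumer is hypothetical; `human.detect` is the API used above):

    human.detect(img, userConfig)
      .then((res) => { // eslint-disable-line promise/always-return
        render(res); // hypothetical consumer of the detection result
      })
      .catch((err) => log('detect error', err));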
@@ -45,6 +45,6 @@
   <div id="list" style="height: 10px"></div>
   <div class="text">Select person to sort by similarity and get a known face match</div>
   <div id="faces" style="height: 50vh; overflow-y: auto"></div>
 </div>
 </body>
 </html>
@@ -52,7 +52,7 @@ threads.parentPort?.on('message', (msg) => {
   if (msg instanceof SharedArrayBuffer) { // called only once to receive reference to shared array buffer
     buffer = msg;
     view = new Float32Array(buffer); // initialize f64 view into buffer
-    if (debug) threads.parentPort?.postMessage(`buffer: ${buffer?.byteLength}`);
+    if (debug) threads.parentPort?.postMessage(`buffer: ${buffer.byteLength}`);
   }
   if (typeof msg.records !== 'undefined') { // recived every time when number of records changes
     records = msg.records;

@@ -68,7 +68,7 @@ threads.parentPort?.on('message', (msg) => {
   }
   if (typeof msg.shutdown !== 'undefined') { // got message to close worker
     if (debug) threads.parentPort?.postMessage('shutting down');
-    process.exit(0);
+    process.exit(0); // eslint-disable-line no-process-exit
   }
 });
 
@@ -6,8 +6,8 @@
 
 const fs = require('fs');
 const path = require('path');
-const log = require('@vladmandic/pilogger');
 const threads = require('worker_threads');
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require
 
 // global optinos
 const options = {

@@ -160,7 +160,7 @@ async function createBuffer() {
   data.buffer = new SharedArrayBuffer(4 * options.dbMax * options.descLength); // preallocate max number of records as sharedarraybuffers cannot grow
   data.view = new Float32Array(data.buffer); // create view into buffer
   data.labels.length = 0;
-  log.data('created shared buffer:', { maxDescriptors: (data.view?.length || 0) / options.descLength, totalBytes: data.buffer.byteLength, totalElements: data.view?.length });
+  log.data('created shared buffer:', { maxDescriptors: (data.view.length || 0) / options.descLength, totalBytes: data.buffer.byteLength, totalElements: data.view.length });
 }
 
 async function main() {
@@ -1,4 +1,3 @@
-// @ts-nocheck
 // based on: https://github.com/munrocket/gl-bench
 
 const UICSS = `

@@ -43,9 +42,7 @@ class GLBench {
   constructor(gl, settings = {}) {
     this.css = UICSS;
     this.svg = UISVG;
-    // eslint-disable-next-line @typescript-eslint/no-empty-function
     this.paramLogger = () => {};
-    // eslint-disable-next-line @typescript-eslint/no-empty-function
     this.chartLogger = () => {};
     this.chartLen = 20;
     this.chartHz = 20;

@@ -92,7 +89,6 @@ class GLBench {
 
   const addProfiler = (fn, self, target) => {
     const t = self.now();
-    // eslint-disable-next-line prefer-rest-params
     fn.apply(target, arguments);
     if (self.trackGPU) self.finished.push(glFinish(t, self.activeAccums.slice(0)));
   };

@@ -107,13 +103,11 @@ class GLBench {
     if (gl[fn]) {
       gl[fn] = addProfiler(gl[fn], this, gl);
     } else {
-      // eslint-disable-next-line no-console
       console.log('bench: cannot attach to webgl function');
     }
 
   /*
   gl.getExtension = ((fn, self) => {
-    // eslint-disable-next-line prefer-rest-params
     const ext = fn.apply(gl, arguments);
     if (ext) {
       ['drawElementsInstancedANGLE', 'drawBuffersWEBGL'].forEach((fn2) => {

@@ -148,7 +142,6 @@ class GLBench {
   return (i, cpu, gpu, mem, fps, totalTime, frameId) => {
     nodes['gl-cpu'][i].style.strokeDasharray = (cpu * 0.27).toFixed(0) + ' 100';
     nodes['gl-gpu'][i].style.strokeDasharray = (gpu * 0.27).toFixed(0) + ' 100';
-    // eslint-disable-next-line no-nested-ternary
     nodes['gl-mem'][i].innerHTML = names[i] ? names[i] : (mem ? 'mem: ' + mem.toFixed(0) + 'mb' : '');
     nodes['gl-fps'][i].innerHTML = 'FPS: ' + fps.toFixed(1);
     logger(names[i], cpu, gpu, mem, fps, totalTime, frameId);
@@ -64,9 +64,7 @@ function createNode() {
   hideChildren() {
     if (Array.isArray(this.children)) {
       this.children.forEach((item) => {
-        // @ts-ignore
         item['elem']['classList'].add('hide');
-        // @ts-ignore
         if (item['expanded']) item.hideChildren();
       });
     }

@@ -74,9 +72,7 @@ function createNode() {
   showChildren() {
     if (Array.isArray(this.children)) {
       this.children.forEach((item) => {
-        // @ts-ignore
         item['elem']['classList'].remove('hide');
-        // @ts-ignore
         if (item['expanded']) item.showChildren();
       });
     }
@@ -119,7 +119,6 @@ class Menu {
 
     this.menu.appendChild(this.container);
     if (typeof parent === 'object') parent.appendChild(this.menu);
-    // @ts-ignore undefined
     else document.getElementById(parent).appendChild(this.menu);
   }
 

@@ -184,7 +183,6 @@ class Menu {
       this.hidden = !this.hidden;
      const all = document.getElementsByClassName('menu');
       for (const item of all) {
-        // @ts-ignore
         item.style.display = this.hidden ? 'none' : 'block';
       }
     });

@@ -241,7 +239,6 @@ class Menu {
     el.addEventListener('change', (evt) => {
       if (evt.target) {
         object[variable] = parseInt(evt.target['value']) === parseFloat(evt.target['value']) ? parseInt(evt.target['value']) : parseFloat(evt.target['value']);
-        // @ts-ignore
         evt.target.setAttribute('value', evt.target['value']);
         if (callback) callback(evt.target['value']);
       }

@@ -286,7 +283,6 @@ class Menu {
     return el;
   }
 
-  // eslint-disable-next-line class-methods-use-this
   updateValue(title, val, suffix = '') {
     const el = document.getElementById(`menu-val-${title}`);
     if (el) el.innerText = `${title}: ${val}${suffix}`;

@@ -303,11 +299,9 @@ class Menu {
     return el;
   }
 
-  // eslint-disable-next-line class-methods-use-this
   async updateChart(id, values) {
     if (!values || (values.length === 0)) return;
     /** @type {HTMLCanvasElement} */
-    // @ts-ignore undefined
     const canvas = document.getElementById(`menu-canvas-${id}`);
     if (!canvas) return;
     const ctx = canvas.getContext('2d');
@@ -4,8 +4,7 @@ async function log(...msg) {
   if (debug) {
     const dt = new Date();
     const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;
-    // eslint-disable-next-line no-console
-    console.log(ts, 'webrtc', ...msg);
+    console.log(ts, 'webrtc', ...msg); // eslint-disable-line no-console
   }
 }
 
@@ -2,6 +2,7 @@
  * PWA Service Worker for Human main demo
  */
 
+/* eslint-disable no-restricted-globals */
 /// <reference lib="webworker" />
 
 const skipCaching = false;

@@ -19,8 +20,7 @@ const stats = { hit: 0, miss: 0 };
 const log = (...msg) => {
   const dt = new Date();
   const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;
-  // eslint-disable-next-line no-console
-  console.log(ts, 'pwa', ...msg);
+  console.log(ts, 'pwa', ...msg); // eslint-disable-line no-console
 };
 
 async function updateCached(req) {

@@ -28,7 +28,7 @@ async function updateCached(req) {
     .then((update) => {
       // update cache if request is ok
       if (update.ok) {
-        caches
+        caches // eslint-disable-line promise/no-nesting
           .open(cacheName)
           .then((cache) => cache.put(req, update))
           .catch((err) => log('cache update error', err));

@@ -75,14 +75,13 @@ async function getCached(evt) {
 }
 
 function cacheInit() {
-  // eslint-disable-next-line promise/catch-or-return
   caches.open(cacheName)
-    // eslint-disable-next-line promise/no-nesting
-    .then((cache) => cache.addAll(cacheFiles)
+    .then((cache) => cache.addAll(cacheFiles) // eslint-disable-line promise/no-nesting
       .then(
         () => log('cache refresh:', cacheFiles.length, 'files'),
         (err) => log('cache error', err),
-      ));
+      ))
+    .catch(() => log('cache error'));
 }
 
 if (!listening) {

@@ -99,14 +98,12 @@ if (!listening) {
 
   self.addEventListener('install', (evt) => {
     log('install');
-    // @ts-ignore scope for self is ServiceWorkerGlobalScope not Window
     self.skipWaiting();
     evt.waitUntil(cacheInit);
   });
 
   self.addEventListener('activate', (evt) => {
     log('activate');
-    // @ts-ignore scope for self is ServiceWorkerGlobalScope not Window
     evt.waitUntil(self.clients.claim());
   });
 

@@ -114,7 +111,7 @@ if (!listening) {
     const uri = new URL(evt.request.url);
     // if (uri.pathname === '/') { log('cache skip /', evt.request); return; } // skip root access requests
     if (evt.request.cache === 'only-if-cached' && evt.request.mode !== 'same-origin') return; // required due to chrome bug
-    if (uri.origin !== location.origin) return; // skip non-local requests
+    if (uri.origin !== self.location.origin) return; // skip non-local requests
     if (evt.request.method !== 'GET') return; // only cache get requests
     if (evt.request.url.includes('/api/')) return; // don't cache api requests, failures are handled at the time of call
 

@@ -129,7 +126,7 @@ if (!listening) {
     log(`PWA: ${evt.type}`);
     if (refreshed) return;
     refreshed = true;
-    location.reload();
+    self.location.reload();
   });
 
   listening = true;
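
The `self.location` rewrites above follow from running this file under airbnb's `no-restricted-globals`: inside a service worker the global scope is `ServiceWorkerGlobalScope`, so the same object the banned bare `location` refers to is reached explicitly through `self` instead, while the file-level `/* eslint-disable no-restricted-globals */` covers the remaining worker globals. A minimal sketch, taken from the hunk above:

    // bare global (flagged):    if (uri.origin !== location.origin) return;
    // explicit worker global:   if (uri.origin !== self.location.origin) return;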
@@ -6,10 +6,9 @@
 /// <reference lib="webworker"/>
 
 // load Human using IIFE script as Chome Mobile does not support Modules as Workers
-self.importScripts('../dist/human.js');
+self.importScripts('../dist/human.js'); // eslint-disable-line no-restricted-globals
 
 let busy = false;
-// @ts-ignore
 // eslint-disable-next-line new-cap, no-undef
 const human = new Human.default();
 
@@ -89,9 +89,9 @@
 <body>
   <div id="play" class="play icon-play"></div>
   <div id="background">
-    <div class='wave one'></div>
-    <div class='wave two'></div>
-    <div class='wave three'></div>
+    <div class="wave one"></div>
+    <div class="wave two"></div>
+    <div class="wave three"></div>
   </div>
   <div id="loader" class="loader"></div>
   <div id="status" class="status"></div>
demo/index.js: 104 changed lines

@@ -20,9 +20,7 @@
 
 // test url <https://human.local/?worker=false&async=false&bench=false&draw=true&warmup=full&backend=humangl>
 
-// @ts-nocheck // typescript checks disabled as this is pure javascript
-
-import Human from '../dist/human.esm.js'; // equivalent of @vladmandic/human
+import { Human } from '../dist/human.esm.js'; // equivalent of @vladmandic/human
 import Menu from './helpers/menu.js';
 import GLBench from './helpers/gl-bench.js';
 import webRTC from './helpers/webrtc.js';

@@ -153,7 +151,7 @@ let bench;
 let lastDetectedResult = {};
 
 // helper function: async pause
-// eslint-disable-next-line @typescript-eslint/no-unused-vars, no-unused-vars
+// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
 const delay = (ms) => new Promise((resolve) => { setTimeout(resolve, ms); });
 
 // helper function: translates json to human readable string

@@ -171,8 +169,7 @@ function str(...msg) {
 function log(...msg) {
   const dt = new Date();
   const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;
-  // eslint-disable-next-line no-console
-  if (ui.console) console.log(ts, ...msg);
+  if (ui.console) console.log(ts, ...msg); // eslint-disable-line no-console
 }
 
 let prevStatus = '';

@@ -349,12 +346,10 @@ async function drawResults(input) {
       videoPause();
       ui.drawThread = null;
     }
-  } else {
-    if (ui.drawThread) {
-      log('stopping buffered refresh');
-      cancelAnimationFrame(ui.drawThread);
-      ui.drawThread = null;
-    }
+  } else if (ui.drawThread) {
+    log('stopping buffered refresh');
+    cancelAnimationFrame(ui.drawThread);
+    ui.drawThread = null;
   }
 }

@@ -445,8 +440,7 @@ async function setupCamera() {
   ui.menuWidth.input.setAttribute('value', video.videoWidth);
   ui.menuHeight.input.setAttribute('value', video.videoHeight);
   if (live || ui.autoPlay) await videoPlay();
-  // eslint-disable-next-line no-use-before-define
-  if ((live || ui.autoPlay) && !ui.detectThread) runHumanDetect(video, canvas);
+  if ((live || ui.autoPlay) && !ui.detectThread) runHumanDetect(video, canvas); // eslint-disable-line no-use-before-define
   return 'camera stream ready';
 }
 

@@ -500,8 +494,7 @@ function webWorker(input, image, canvas, timestamp) {
     ui.framesDetect++;
     if (!ui.drawThread) drawResults(input);
     if (isLive(input)) {
-      // eslint-disable-next-line no-use-before-define
-      ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
+      ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now)); // eslint-disable-line no-use-before-define
     }
   });
 }

@@ -538,36 +531,39 @@ function runHumanDetect(input, canvas, timestamp) {
     // perform detection in worker
     webWorker(input, data, canvas, timestamp);
   } else {
-    human.detect(input, userConfig).then((result) => {
-      status();
-      /*
-      setTimeout(async () => { // simulate gl context lost 2sec after initial detection
-        const ext = human.gl && human.gl.gl ? human.gl.gl.getExtension('WEBGL_lose_context') : {};
-        if (ext && ext.loseContext) {
-          log('simulate context lost:', human.env.webgl, human.gl, ext);
-          human.gl.gl.getExtension('WEBGL_lose_context').loseContext();
-          await videoPause();
-          status('Exception: WebGL');
-        }
-      }, 2000);
-      */
-      if (result.performance && result.performance.total) ui.detectFPS.push(1000 / result.performance.total);
-      if (ui.detectFPS.length > ui.maxFPSframes) ui.detectFPS.shift();
-      if (ui.bench) {
-        if (!bench) initPerfMonitor();
-        bench.nextFrame(timestamp);
-      }
-      if (document.getElementById('gl-bench')) document.getElementById('gl-bench').style.display = ui.bench ? 'block' : 'none';
-      if (result.error) {
-        log(result.error);
-        document.getElementById('log').innerText += `\nHuman error: ${result.error}`;
-      } else {
-        lastDetectedResult = result;
-        if (!ui.drawThread) drawResults(input);
-        ui.framesDetect++;
-        ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
-      }
-    });
+    human.detect(input, userConfig)
+      .then((result) => {
+        status();
+        /*
+        setTimeout(async () => { // simulate gl context lost 2sec after initial detection
+          const ext = human.gl && human.gl.gl ? human.gl.gl.getExtension('WEBGL_lose_context') : {};
+          if (ext && ext.loseContext) {
+            log('simulate context lost:', human.env.webgl, human.gl, ext);
+            human.gl.gl.getExtension('WEBGL_lose_context').loseContext();
+            await videoPause();
+            status('Exception: WebGL');
+          }
+        }, 2000);
+        */
+        if (result.performance && result.performance.total) ui.detectFPS.push(1000 / result.performance.total);
+        if (ui.detectFPS.length > ui.maxFPSframes) ui.detectFPS.shift();
+        if (ui.bench) {
+          if (!bench) initPerfMonitor();
+          bench.nextFrame(timestamp);
+        }
+        if (document.getElementById('gl-bench')) document.getElementById('gl-bench').style.display = ui.bench ? 'block' : 'none';
+        if (result.error) {
+          log(result.error);
+          document.getElementById('log').innerText += `\nHuman error: ${result.error}`;
+        } else {
+          lastDetectedResult = result;
+          if (!ui.drawThread) drawResults(input);
+          ui.framesDetect++;
+          ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
+        }
+        return result;
+      })
+      .catch(() => log('human detect error'));
   }
 }

@@ -614,8 +610,7 @@ async function processImage(input, title) {
   // copy to clipboard on click
   if (typeof ClipboardItem !== 'undefined' && navigator.clipboard) {
     evt.target.toBlob((blob) => {
-      // eslint-disable-next-line no-undef
-      const item = new ClipboardItem({ 'image/png': blob });
+      const item = new ClipboardItem({ 'image/png': blob }); // eslint-disable-line no-undef
       navigator.clipboard.write([item]);
       log('copied image to clipboard');
     });

@@ -938,10 +933,10 @@ async function pwaRegister() {
   const regs = await navigator.serviceWorker.getRegistrations();
   for (const reg of regs) {
     log('pwa found:', reg.scope);
-    if (reg.scope.startsWith(location.origin)) found = reg;
+    if (reg.scope.startsWith(window.location.origin)) found = reg;
   }
   if (!found) {
-    const reg = await navigator.serviceWorker.register(pwa.scriptFile, { scope: location.pathname });
+    const reg = await navigator.serviceWorker.register(pwa.scriptFile, { scope: window.location.pathname });
     found = reg;
     log('pwa registered:', reg.scope);
   }

@@ -973,8 +968,7 @@ async function main() {
   if (ui.detectThread) cancelAnimationFrame(ui.detectThread);
   if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
   const msg = evt.reason.message || evt.reason || evt;
-  // eslint-disable-next-line no-console
-  console.error(msg);
+  console.error(msg); // eslint-disable-line no-console
   document.getElementById('log').innerHTML = msg;
   status(`exception: ${msg}`);
   evt.preventDefault();

@@ -997,7 +991,7 @@ async function main() {
   await pwaRegister();
 
   // parse url search params
-  const params = new URLSearchParams(location.search);
+  const params = new URLSearchParams(window.location.search);
   log('url options:', params.toString());
   if (params.has('worker')) {
     ui.useWorker = JSON.parse(params.get('worker'));

@@ -1040,10 +1034,8 @@ async function main() {
   // we've merged human defaults with user config and now lets store it back so it can be accessed by methods such as menu
   userConfig = human.config;
   if (typeof tf !== 'undefined') {
-    // eslint-disable-next-line no-undef
-    log('TensorFlow external version:', tf.version);
-    // eslint-disable-next-line no-undef
-    human.tf = tf; // use externally loaded version of tfjs
+    log('TensorFlow external version:', tf.version); // eslint-disable-line no-undef
+    human.tf = tf; // eslint-disable-line no-undef
   }
   log('tfjs version:', human.tf.version.tfjs);
 
@ -5,7 +5,7 @@
|
||||||
*
|
*
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import Human from '../../dist/human.esm.js'; // equivalent of @vladmandic/human
|
import { Human } from '../../dist/human.esm.js'; // equivalent of @vladmandic/human
|
||||||
import GLBench from '../helpers/gl-bench.js';
|
import GLBench from '../helpers/gl-bench.js';
|
||||||
|
|
||||||
const workerJS = './worker.js';
|
const workerJS = './worker.js';
|
||||||
|
@ -130,8 +130,7 @@ const result = { // initialize empty result object which will be partially fille
|
||||||
function log(...msg) {
|
function log(...msg) {
|
||||||
const dt = new Date();
|
const dt = new Date();
|
||||||
const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;
|
const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;
|
||||||
// eslint-disable-next-line no-console
|
console.log(ts, ...msg); // eslint-disable-line no-console
|
||||||
console.log(ts, ...msg);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async function drawResults() {
|
async function drawResults() {
|
||||||
|
@@ -203,7 +202,9 @@ async function setupCamera() {
     },
   };
   // enumerate devices for diag purposes
-  navigator.mediaDevices.enumerateDevices().then((devices) => log('enumerated devices:', devices));
+  navigator.mediaDevices.enumerateDevices()
+    .then((devices) => log('enumerated devices:', devices))
+    .catch(() => log('mediaDevices error'));
   log('camera constraints', constraints);
   try {
     stream = await navigator.mediaDevices.getUserMedia(constraints);
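The added `.catch()` satisfies `promise/catch-or-return` from eslint-plugin-promise, which requires every promise chain to either be returned or handle rejection. Since this code already runs inside an async function, an equivalent `async`/`await` form would be (a sketch, not the commit's code):

  try {
    const devices = await navigator.mediaDevices.enumerateDevices();
    log('enumerated devices:', devices);
  } catch {
    log('mediaDevices error');
  }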
@@ -230,7 +231,7 @@ async function setupCamera() {
     };
   });
   // attach input to video element
-  if (stream && video) video['srcObject'] = stream;
+  if (stream && video) video.srcObject = stream;
   return promise;
 }

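Bracket access such as `video['srcObject']` is rewritten to plain property access here and in several hunks below; the `dot-notation` rule flags bracket notation whenever the key is a valid identifier. In miniature (hypothetical object):

  obj['name'] = 1; // flagged by dot-notation: the key is a valid identifier
  obj.name = 1;    // equivalent, preferred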
@@ -6,11 +6,10 @@
 */

 const fs = require('fs');
-const log = require('@vladmandic/pilogger');
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require

 // workers actual import tfjs and human modules
-// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
-const tf = require('@tensorflow/tfjs-node');
+const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
 const Human = require('../../dist/human.node.js').default; // or const Human = require('../dist/human.node-gpu.js').default;

 let human = null;
@@ -64,11 +63,9 @@ async function main() {

   // on worker start first initialize message handler so we don't miss any messages
   process.on('message', (msg) => {
-    // @ts-ignore
-    if (msg.exit && process.exit) process.exit(); // if main told worker to exit
-    // @ts-ignore
+    // if main told worker to exit
+    if (msg.exit && process.exit) process.exit(); // eslint-disable-line no-process-exit
     if (msg.test && process.send) process.send({ test: true });
-    // @ts-ignore
     if (msg.image) detect(msg.image); // if main told worker to process image
     log.data('Worker received message:', process.pid, msg); // generic log
   });
@@ -8,9 +8,8 @@

 const fs = require('fs');
 const path = require('path');
-// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
-const log = require('@vladmandic/pilogger'); // this is my simple logger with few extra features
-const child_process = require('child_process');
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require
+const childProcess = require('child_process'); // eslint-disable-line camelcase
 // note that main process does not import human or tfjs at all, it's all done from worker process

 const workerFile = 'demo/multithread/node-multiprocess-worker.js';
@@ -74,7 +73,7 @@ async function main() {
   // manage worker processes
   for (let i = 0; i < numWorkers; i++) {
     // create worker process
-    workers[i] = await child_process.fork(workerFile, ['special']);
+    workers[i] = await childProcess.fork(workerFile, ['special']);
     // parse message that worker process sends back to main
     // if message is ready, dispatch next image in queue
     // if message is processing result, just print how many faces were detected

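This worker pool pairs `childProcess.fork()` in the main process with the `process.on('message')` handler shown in the previous file. A minimal round trip, reduced to its essentials (the path and message fields are illustrative, not the demo's actual ones):

  const childProcess = require('child_process');
  const worker = childProcess.fork('worker.js'); // spawn a child node process with an IPC channel
  worker.on('message', (msg) => { if (msg.ready) worker.send({ image: 'sample.jpg' }); }); // dispatch work when the child reports ready
  worker.send({ test: true }); // child echoes back per its message handler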
@@ -1,7 +1,7 @@
 /// <reference lib="webworker" />

 // load Human using IIFE script as Chome Mobile does not support Modules as Workers
-self.importScripts('../../dist/human.js');
+self.importScripts('../../dist/human.js'); // eslint-disable-line no-restricted-globals

 let human;

@@ -9,9 +9,8 @@ onmessage = async (msg) => {
   // received from index.js using:
   // worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);

-  // @ts-ignore // Human is registered as global namespace using IIFE script
-  // eslint-disable-next-line no-undef, new-cap
-  if (!human) human = new Human.default(msg.data.config);
+  // Human is registered as global namespace using IIFE script
+  if (!human) human = new Human.default(msg.data.config); // eslint-disable-line no-undef, new-cap
   const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
   let result = {};
   result = await human.detect(image, msg.data.config);
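The worker reconstructs an `ImageData` from a transferred `ArrayBuffer`. On the page side, the matching send follows the comment quoted in the hunk above (a sketch; `canvas`, `worker`, and `config` are assumed to exist):

  const ctx = canvas.getContext('2d');
  const image = ctx.getImageData(0, 0, canvas.width, canvas.height);
  // the second argument transfers the buffer to the worker instead of copying it
  worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);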
@@ -6,9 +6,10 @@

 const fs = require('fs');
 const process = require('process');
-const log = require('@vladmandic/pilogger');
-const canvas = require('canvas'); // eslint-disable-line node/no-extraneous-require, node/no-missing-require
-const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require
+// in nodejs environments tfjs-node is required to be loaded before human
+const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
+const canvas = require('canvas'); // eslint-disable-line node/no-unpublished-require
 // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases)
 const Human = require('../../dist/human.node.js'); // use this when using human in dev mode

@@ -63,7 +64,7 @@ async function main() {
       const face = persons[i].face;
       const faceTxt = face ? `score:${face.score} age:${face.age} gender:${face.gender} iris:${face.iris}` : null;
       const body = persons[i].body;
-      const bodyTxt = body ? `score:${body.score} keypoints:${body.keypoints?.length}` : null;
+      const bodyTxt = body ? `score:${body.score} keypoints:${body.keypoints.length}` : null;
       log.data(`Detected: #${i}: Face:${faceTxt} Body:${bodyTxt} LeftHand:${persons[i].hands.left ? 'yes' : 'no'} RightHand:${persons[i].hands.right ? 'yes' : 'no'} Gestures:${persons[i].gestures.length}`);
     }

@@ -71,7 +72,6 @@ async function main() {
   const outputCanvas = new canvas.Canvas(inputImage.width, inputImage.height); // create canvas
   const outputCtx = outputCanvas.getContext('2d');
   outputCtx.drawImage(result.canvas || inputImage, 0, 0); // draw input image onto canvas
-  // @ts-ignore canvas is not checked for typedefs
   human.draw.all(outputCanvas, result); // use human build-in method to draw results as overlays on canvas
   const outFile = fs.createWriteStream(output); // write canvas to new image file
   outFile.on('finish', () => log.state('Output image:', output, outputCanvas.width, outputCanvas.height));
@@ -2,13 +2,14 @@
 * Human demo for NodeJS
 */

-const log = require('@vladmandic/pilogger');
 const fs = require('fs');
 const process = require('process');

 let fetch; // fetch is dynamically imported later

-const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require
+// in nodejs environments tfjs-node is required to be loaded before human
+const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
 // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases)
 const Human = require('../../dist/human.node.js'); // use this when using human in dev mode

@@ -37,7 +38,7 @@ async function detect(input) {
   let buffer;
   log.info('Loading image:', input);
   if (input.startsWith('http:') || input.startsWith('https:')) {
-    fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-extraneous-require, node/no-missing-import
+    fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-unpublished-import
     const res = await fetch(input);
     if (res && res.ok) buffer = await res.buffer();
     else log.error('Invalid image URL:', input, res.status, res.statusText, res.headers.get('content-type'));
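node-fetch v3 ships as ESM only, which is why these CommonJS demos load it with a dynamic `import()` instead of `require()`. The general shape of that pattern (a sketch; the helper name is hypothetical):

  let fetch; // cached across calls
  async function ensureFetch() {
    if (!fetch) fetch = (await import('node-fetch')).default; // dynamic ESM import from CommonJS
    return fetch;
  }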
@@ -4,10 +4,10 @@
 * Requires [node-fetch](https://www.npmjs.com/package/node-fetch) to provide `fetch` functionality in NodeJS environment
 */
 const fs = require('fs');
-const log = require('@vladmandic/pilogger');
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require

-// eslint-disable-next-line import/no-extraneous-dependencies, no-unused-vars, @typescript-eslint/no-unused-vars
-const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human
+// in nodejs environments tfjs-node is required to be loaded before human
+const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
 // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases)
 const Human = require('../../dist/human.node.js'); // use this when using human in dev mode

@@ -17,7 +17,7 @@ const humanConfig = {

 async function main(inputFile) {
   // @ts-ignore
-  global.fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-extraneous-require, node/no-missing-import
+  global.fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-unpublished-import
   const human = new Human.Human(humanConfig); // create instance of human using default configuration
   log.info('Human:', human.version, 'TF:', tf.version_core);
   await human.load(); // optional as models would be loaded on-demand first time they are required
@@ -2,12 +2,12 @@
 * Human Person Similarity test for NodeJS
 */

-const log = require('@vladmandic/pilogger');
 const fs = require('fs');
 const process = require('process');

-// eslint-disable-next-line import/no-extraneous-dependencies, no-unused-vars, @typescript-eslint/no-unused-vars
-const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require
+// in nodejs environments tfjs-node is required to be loaded before human
+const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
 // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases)
 const Human = require('../../dist/human.node.js'); // use this when using human in dev mode

@@ -25,7 +25,7 @@ const myConfig = {
 async function init() {
   human = new Human.Human(myConfig);
   await human.tf.ready();
-  log.info('Human:', human.version);
+  log.info('Human:', human.version, 'TF:', tf.version_core);
   await human.load();
   const loaded = Object.keys(human.models).filter((a) => human.models[a]);
   log.info('Loaded:', loaded);
@@ -34,12 +34,11 @@ async function init() {

 async function detect(input) {
   if (!fs.existsSync(input)) {
-    log.error('Cannot load image:', input);
-    process.exit(1);
+    throw new Error('Cannot load image:', input);
   }
   const buffer = fs.readFileSync(input);
   const tensor = human.tf.node.decodeImage(buffer, 3);
-  log.state('Loaded image:', input, tensor['shape']);
+  log.state('Loaded image:', input, tensor.shape);
   const result = await human.detect(tensor, myConfig);
   human.tf.dispose(tensor);
   log.state('Detected faces:', result.face.length);
@@ -50,15 +49,13 @@ async function main() {
   log.configure({ inspect: { breakLength: 265 } });
   log.header();
   if (process.argv.length !== 4) {
-    log.error('Parameters: <first image> <second image> missing');
-    process.exit(1);
+    throw new Error('Parameters: <first image> <second image> missing');
   }
   await init();
   const res1 = await detect(process.argv[2]);
   const res2 = await detect(process.argv[3]);
   if (!res1 || !res1.face || res1.face.length === 0 || !res2 || !res2.face || res2.face.length === 0) {
-    log.error('Could not detect face descriptors');
-    process.exit(1);
+    throw new Error('Could not detect face descriptors');
   }
   const similarity = human.similarity(res1.face[0].embedding, res2.face[0].embedding, { order: 2 });
   log.data('Similarity: ', similarity);
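Replacing `log.error()` + `process.exit(1)` with `throw new Error()` satisfies the `no-process-exit` rule and still ends the demo with a non-zero code: since Node 15 an unhandled promise rejection terminates the process by default, so an error thrown inside `main()` is fatal. The bare pattern (generic sketch):

  async function main() {
    if (process.argv.length !== 4) throw new Error('missing parameters');
    // ...
  }
  main(); // the rejected promise crashes the process with a non-zero exit code on modern Node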
@@ -5,7 +5,8 @@
 const fs = require('fs');
 const process = require('process');

-const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human
+// in nodejs environments tfjs-node is required to be loaded before human
+const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
 // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases)
 const Human = require('../../dist/human.node.js'); // use this when using human in dev mode

@@ -12,11 +12,11 @@
 */

 const spawn = require('child_process').spawn;
-const log = require('@vladmandic/pilogger');
-// @ts-ignore pipe2jpeg is not installed by default
-const Pipe2Jpeg = require('pipe2jpeg'); // eslint-disable-line node/no-missing-require
-const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require
+// in nodejs environments tfjs-node is required to be loaded before human
+const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
+const Pipe2Jpeg = require('pipe2jpeg'); // eslint-disable-line node/no-missing-require, import/no-unresolved
 // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases)
 const Human = require('../../dist/human.node.js'); // use this when using human in dev mode

@@ -7,10 +7,11 @@
 */

 let initial = true; // remember if this is the first run to print additional details
-const log = require('@vladmandic/pilogger');
-const nodeWebCam = require('node-webcam'); // eslint-disable-line node/no-missing-require, node/no-extraneous-require
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require
+const nodeWebCam = require('node-webcam'); // eslint-disable-line import/no-unresolved, node/no-missing-require

-const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human
+// in nodejs environments tfjs-node is required to be loaded before human
+const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
 // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases)
 const Human = require('../../dist/human.node.js'); // use this when using human in dev mode

@@ -59,18 +60,20 @@ async function detect() {
     } else {
       const tensor = buffer2tensor(data); // create tensor from image buffer
       if (initial) log.data('input tensor:', tensor.shape);
-      // eslint-disable-next-line promise/no-promise-in-callback
-      human.detect(tensor).then((result) => {
-        if (result && result.face && result.face.length > 0) {
-          for (let i = 0; i < result.face.length; i++) {
-            const face = result.face[i];
-            const emotion = face.emotion?.reduce((prev, curr) => (prev.score > curr.score ? prev : curr));
-            log.data(`detected face: #${i} boxScore:${face.boxScore} faceScore:${face.faceScore} age:${face.age} genderScore:${face.genderScore} gender:${face.gender} emotionScore:${emotion?.score} emotion:${emotion?.emotion} iris:${face.iris}`);
-          }
-        } else {
-          log.data(' Face: N/A');
-        }
-      });
+      human.detect(tensor) // eslint-disable-line promise/no-promise-in-callback
+        .then((result) => {
+          if (result && result.face && result.face.length > 0) {
+            for (let i = 0; i < result.face.length; i++) {
+              const face = result.face[i];
+              const emotion = face.emotion?.reduce((prev, curr) => (prev.score > curr.score ? prev : curr));
+              log.data(`detected face: #${i} boxScore:${face.boxScore} faceScore:${face.faceScore} age:${face.age} genderScore:${face.genderScore} gender:${face.gender} emotionScore:${emotion?.score} emotion:${emotion?.emotion} iris:${face.iris}`);
+            }
+          } else {
+            log.data(' Face: N/A');
+          }
+          return result;
+        })
+        .catch(() => log.error('human detect error'));
     }
     initial = false;
   });
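The restructured chain adds `return result;` and a final `.catch()` because eslint-plugin-promise enforces `promise/always-return` (every `.then()` callback must return a value or throw) alongside `promise/catch-or-return`. The skeleton of the pattern, stripped of demo specifics (`somePromise`, `use`, and `handle` are placeholders):

  somePromise()
    .then((result) => {
      use(result);
      return result; // satisfies promise/always-return
    })
    .catch((err) => handle(err)); // satisfies promise/catch-or-return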
@@ -4,14 +4,15 @@
 * Requires [node-fetch](https://www.npmjs.com/package/node-fetch) to provide `fetch` functionality in NodeJS environment
 */

-const log = require('@vladmandic/pilogger');
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require
 const fs = require('fs');
 const path = require('path');
 const process = require('process');

 let fetch; // fetch is dynamically imported later

-const tf = require('@tensorflow/tfjs-node'); // in nodejs environments tfjs-node is required to be loaded before human
+// in nodejs environments tfjs-node is required to be loaded before human
+const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
 // const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases)
 const Human = require('../../dist/human.node.js'); // use this when using human in dev mode

@@ -55,7 +56,7 @@ async function init() {
   const loaded = Object.keys(human.models).filter((a) => human.models[a]);
   log.info('Loaded:', loaded);
   // log.info('Memory state:', human.tf.engine().memory());
-  log.data(tf.backend()['binding'] ? tf.backend()['binding']['TF_Version'] : null);
+  log.data(tf.backend().binding ? tf.backend().binding.TF_Version : null);
 }

 async function detect(input) {
@@ -88,7 +89,7 @@ async function detect(input) {
   });

   // image shape contains image dimensions and depth
-  log.state('Processing:', tensor['shape']);
+  log.state('Processing:', tensor.shape);

   // run actual detection
   let result;
|
||||||
log.configure({ inspect: { breakLength: 265 } });
|
log.configure({ inspect: { breakLength: 265 } });
|
||||||
log.header();
|
log.header();
|
||||||
log.info('Current folder:', process.env.PWD);
|
log.info('Current folder:', process.env.PWD);
|
||||||
fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-extraneous-require, node/no-missing-import
|
fetch = (await import('node-fetch')).default; // eslint-disable-line node/no-unpublished-import
|
||||||
await init();
|
await init();
|
||||||
const f = process.argv[2];
|
const f = process.argv[2];
|
||||||
if (process.argv.length !== 3) {
|
if (process.argv.length !== 3) {
|
||||||
|
@@ -199,20 +200,18 @@ async function main() {
     await test();
   } else if (!fs.existsSync(f) && !f.startsWith('http')) {
     log.error(`File not found: ${process.argv[2]}`);
-  } else {
-    if (fs.existsSync(f)) {
-      const stat = fs.statSync(f);
-      if (stat.isDirectory()) {
-        const dir = fs.readdirSync(f);
-        for (const file of dir) {
-          await detect(path.join(f, file));
-        }
-      } else {
-        await detect(f);
-      }
-    } else {
-      await detect(f);
-    }
-  }
+  } else if (fs.existsSync(f)) {
+    const stat = fs.statSync(f);
+    if (stat.isDirectory()) {
+      const dir = fs.readdirSync(f);
+      for (const file of dir) {
+        await detect(path.join(f, file));
+      }
+    } else {
+      await detect(f);
+    }
+  } else {
+    await detect(f);
+  }
 }

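Collapsing `} else { if (...) {` into `} else if (...) {` is what the `no-lonely-if` rule asks for; it removes one nesting level without changing control flow. In miniature (hypothetical a, b, f, g):

  if (a) { f(); } else { if (b) { g(); } } // flagged by no-lonely-if
  if (a) { f(); } else if (b) { g(); }     // equivalent, preferred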
@@ -10,9 +10,10 @@
 const fs = require('fs');
 const path = require('path');
 const process = require('process');
-const log = require('@vladmandic/pilogger');
-const canvas = require('canvas'); // eslint-disable-line node/no-extraneous-require, node/no-missing-require
-const tf = require('@tensorflow/tfjs-node-gpu'); // for nodejs, `tfjs-node` or `tfjs-node-gpu` should be loaded before using Human
+const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require
+const canvas = require('canvas'); // eslint-disable-line node/no-unpublished-require
+// for nodejs, `tfjs-node` or `tfjs-node-gpu` should be loaded before using Human
+const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
 const Human = require('../../dist/human.node-gpu.js'); // this is 'const Human = require('../dist/human.node-gpu.js').default;'

 const config = { // just enable all and leave default settings
@@ -72,14 +73,12 @@ async function main() {
       const outputCtx = outputCanvas.getContext('2d');
       const inputImage = await canvas.loadImage(buffer); // load image using canvas library
       outputCtx.drawImage(inputImage, 0, 0); // draw input image onto canvas
-      // @ts-ignore
       human.draw.all(outputCanvas, result); // use human build-in method to draw results as overlays on canvas
       const outFile = path.join(outDir, image);
       const outStream = fs.createWriteStream(outFile); // write canvas to new image file
       outStream.on('finish', () => log.state('Output image:', outFile, outputCanvas.width, outputCanvas.height));
       outStream.on('error', (err) => log.error('Output error:', outFile, err));
       const stream = outputCanvas.createJPEGStream({ quality: 0.5, progressive: true, chromaSubsampling: true });
-      // @ts-ignore
       stream.pipe(outStream);
     }
   }
@@ -24,13 +24,13 @@
     a:hover { color: lightskyblue; text-decoration: none; }
     .row { width: 90vw; margin: auto; margin-top: 100px; text-align: center; }
   </style>
 </head>
 <body>
   <div class="row text-center">
     <h1>
       <a href="/">Human: Offline</a><br>
       <img alt="icon" src="../assets/icon.png">
     </h1>
   </div>
 </body>
 </html>
@@ -4,6 +4,6 @@
 author: <https://github.com/vladmandic>'
 */

- old minified bundle line (part 1, split by an embedded template-literal newline):
import{Human as p}from"../../dist/human.esm.js";var w={async:!1,modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},gesture:{enabled:!0}},e=new p(w);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var t={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},o={detectFPS:0,drawFPS:0,frames:0,averageMs:0},i=(...a)=>{t.log.innerText+=a.join(" ")+`
+ new minified bundle line (part 1):
import*as c from"../../dist/human.esm.js";var w={async:!1,modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},gesture:{enabled:!0}},e=new c.Human(w);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var t={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},s={detectFPS:0,drawFPS:0,frames:0,averageMs:0},i=(...a)=>{t.log.innerText+=a.join(" ")+`
- old minified bundle line (part 2, continuation of the same statement):
`,console.log(...a)},r=a=>t.fps.innerText=a,b=a=>t.perf.innerText="tensors:"+e.tf.memory().numTensors+" | performance: "+JSON.stringify(a).replace(/"|{|}/g,"").replace(/,/g," | ");async function h(){r("starting webcam...");let a={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth},height:{ideal:document.body.clientHeight}}},d=await navigator.mediaDevices.getUserMedia(a),m=new Promise(u=>{t.video.onloadeddata=()=>u(!0)});t.video.srcObject=d,t.video.play(),await m,t.canvas.width=t.video.videoWidth,t.canvas.height=t.video.videoHeight;let s=d.getVideoTracks()[0],f=s.getCapabilities?s.getCapabilities():"",v=s.getSettings?s.getSettings():"",g=s.getConstraints?s.getConstraints():"";i("video:",t.video.videoWidth,t.video.videoHeight,s.label,{stream:d,track:s,settings:v,constraints:g,capabilities:f}),t.canvas.onclick=()=>{t.video.paused?t.video.play():t.video.pause()}}async function c(){if(!t.video.paused){n.start===0&&(n.start=e.now()),await e.detect(t.video);let a=e.tf.memory().numTensors;a-n.tensors!==0&&i("allocated tensors:",a-n.tensors),n.tensors=a,o.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,o.frames++,o.averageMs=Math.round(1e3*(e.now()-n.start)/o.frames)/1e3,o.frames%100===0&&!t.video.paused&&i("performance",{...o,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(c)}async function l(){if(!t.video.paused){let d=await e.next(e.result);e.config.filter.flip?await e.draw.canvas(d.canvas,t.canvas):await e.draw.canvas(t.video,t.canvas),await e.draw.all(t.canvas,d),b(d.performance)}let a=e.now();o.drawFPS=Math.round(1e3*1e3/(a-n.draw))/1e3,n.draw=a,r(t.video.paused?"paused":`fps: ${o.detectFPS.toFixed(1).padStart(5," ")} detect | ${o.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(l,30)}async function M(){i("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),i("platform:",e.env.platform,"| agent:",e.env.agent),r("loading..."),await e.load(),i("backend:",e.tf.getBackend(),"| available:",e.env.backends),i("models stats:",e.getModelStats()),i("models loaded:",Object.values(e.models).filter(a=>a!==null).length),r("initializing..."),await e.warmup(),await h(),await c(),await l()}window.onload=M;
+ new minified bundle line (part 2):
`,console.log(...a)},r=a=>t.fps.innerText=a,b=a=>t.perf.innerText="tensors:"+e.tf.memory().numTensors+" | performance: "+JSON.stringify(a).replace(/"|{|}/g,"").replace(/,/g," | ");async function h(){r("starting webcam...");let a={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth},height:{ideal:document.body.clientHeight}}},d=await navigator.mediaDevices.getUserMedia(a),f=new Promise(p=>{t.video.onloadeddata=()=>p(!0)});t.video.srcObject=d,t.video.play(),await f,t.canvas.width=t.video.videoWidth,t.canvas.height=t.video.videoHeight;let o=d.getVideoTracks()[0],v=o.getCapabilities?o.getCapabilities():"",g=o.getSettings?o.getSettings():"",u=o.getConstraints?o.getConstraints():"";i("video:",t.video.videoWidth,t.video.videoHeight,o.label,{stream:d,track:o,settings:g,constraints:u,capabilities:v}),t.canvas.onclick=()=>{t.video.paused?t.video.play():t.video.pause()}}async function l(){if(!t.video.paused){n.start===0&&(n.start=e.now()),await e.detect(t.video);let a=e.tf.memory().numTensors;a-n.tensors!==0&&i("allocated tensors:",a-n.tensors),n.tensors=a,s.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,s.frames++,s.averageMs=Math.round(1e3*(e.now()-n.start)/s.frames)/1e3,s.frames%100===0&&!t.video.paused&&i("performance",{...s,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(l)}async function m(){if(!t.video.paused){let d=await e.next(e.result);e.config.filter.flip?await e.draw.canvas(d.canvas,t.canvas):await e.draw.canvas(t.video,t.canvas),await e.draw.all(t.canvas,d),b(d.performance)}let a=e.now();s.drawFPS=Math.round(1e3*1e3/(a-n.draw))/1e3,n.draw=a,r(t.video.paused?"paused":`fps: ${s.detectFPS.toFixed(1).padStart(5," ")} detect | ${s.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(m,30)}async function M(){i("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),i("platform:",e.env.platform,"| agent:",e.env.agent),r("loading..."),await e.load(),i("backend:",e.tf.getBackend(),"| available:",e.env.backends),i("models stats:",e.getModelStats()),i("models loaded:",Object.values(e.models).filter(a=>a!==null).length),r("initializing..."),await e.warmup(),await h(),await l(),await m()}window.onload=M;
 //# sourceMappingURL=index.js.map

File diff suppressed because one or more lines are too long
@@ -7,9 +7,9 @@
 * @license MIT
 */

-import { Human, Config } from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human
+import * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human

-const humanConfig: Partial<Config> = { // user configuration for human, used to fine-tune behavior
+const humanConfig: Partial<H.Config> = { // user configuration for human, used to fine-tune behavior
   // backend: 'wasm' as const,
   // wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.18.0/dist/',
   // cacheSensitivity: 0,
@@ -23,9 +23,9 @@ const humanConfig: Partial<Config> = { // user configuration for human, used to
   gesture: { enabled: true },
 };

-const human = new Human(humanConfig); // create instance of human with overrides from user configuration
+const human = new H.Human(humanConfig); // create instance of human with overrides from user configuration

-human.env['perfadd'] = false; // is performance data showing instant or total values
+human.env.perfadd = false; // is performance data showing instant or total values
 human.draw.options.font = 'small-caps 18px "Lato"'; // set font used to draw labels when using draw methods
 human.draw.options.lineHeight = 20;
 // human.draw.options.fillPolygons = true;
@@ -42,8 +42,7 @@ const fps = { detectFPS: 0, drawFPS: 0, frames: 0, averageMs: 0 }; // holds calc

 const log = (...msg) => { // helper method to output messages
   dom.log.innerText += msg.join(' ') + '\n';
-  // eslint-disable-next-line no-console
-  console.log(...msg);
+  console.log(...msg); // eslint-disable-line no-console
 };
 const status = (msg) => dom.fps.innerText = msg; // print status element
 const perf = (msg) => dom.perf.innerText = 'tensors:' + human.tf.memory().numTensors + ' | performance: ' + JSON.stringify(msg).replace(/"|{|}/g, '').replace(/,/g, ' | '); // print performance element
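Switching from named imports to `import * as H` keeps the class and its types under a single namespace, so type annotations read as `H.Config` and instantiation as `new H.Human(...)`. A minimal TypeScript sketch of the same pattern (the module specifier and backend value are illustrative):

  import * as H from '@vladmandic/human';
  const config: Partial<H.Config> = { backend: 'webgl' };
  const human = new H.Human(config);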
@@ -33,7 +33,7 @@
     "clean": "build --profile clean",
     "build": "rimraf test/build.log && node build.js",
     "test": "node --no-warnings --unhandled-rejections=strict --trace-uncaught test/node.js",
-    "lint": "eslint src demo test",
+    "lint": "eslint *.json *.js src demo test models",
     "scan": "npx auditjs@latest ossi --dev --quiet"
   },
   "keywords": [
@@ -61,6 +61,8 @@
     "tensorflow"
   ],
   "devDependencies": {
+    "@html-eslint/eslint-plugin": "^0.13.2",
+    "@html-eslint/parser": "^0.13.2",
     "@microsoft/api-extractor": "^7.29.3",
     "@tensorflow/tfjs": "^3.19.0",
     "@tensorflow/tfjs-backend-cpu": "^3.19.0",
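The new `@html-eslint` devDependencies let eslint parse and lint the project's HTML pages alongside the widened `lint` script. A typical override wiring them up looks like this (a sketch of common @html-eslint usage, not necessarily the exact block in this commit's .eslintrc.json):

  {
    "files": ["**/*.html"],
    "parser": "@html-eslint/parser",
    "plugins": ["@html-eslint"],
    "extends": ["plugin:@html-eslint/recommended"]
  }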
@@ -32,8 +32,8 @@ const sigmoid = (x) => (1 - (1 / (1 + Math.exp(x))));

 export async function loadDetect(config: Config): Promise<GraphModel> {
   if (env.initial) models.detector = null;
-  if (!models.detector && config.body['detector'] && config.body['detector']['modelPath'] || '') {
-    models.detector = await loadModel(config.body['detector']['modelPath']);
+  if (!models.detector && config.body['detector'] && config.body['detector'].modelPath || '') {
+    models.detector = await loadModel(config.body['detector'].modelPath);
     const inputs = Object.values(models.detector.modelSignature['inputs']);
     inputSize.detector[0] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[1].size) : 0;
     inputSize.detector[1] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : 0;
@@ -94,7 +94,7 @@ async function prepareImage(input: Tensor, size: number): Promise<Tensor> {
   return final;
 }

-function rescaleKeypoints(keypoints: Array<BodyKeypoint>, outputSize: [number, number]): Array<BodyKeypoint> {
+function rescaleKeypoints(keypoints: BodyKeypoint[], outputSize: [number, number]): BodyKeypoint[] {
   for (const kpt of keypoints) { // first rescale due to padding
     kpt.position = [
       Math.trunc(kpt.position[0] * (outputSize[0] + padding[2][0] + padding[2][1]) / outputSize[0] - padding[2][0]),
@@ -120,7 +120,7 @@ function rescaleKeypoints(keypoints: Array<BodyKeypoint>, outputSize: [number, n
   return keypoints;
 }

-async function fixKeypoints(keypoints: Array<BodyKeypoint>) {
+async function fixKeypoints(keypoints: BodyKeypoint[]) {
   // palm z-coord is incorrect around near-zero so we approximate it
   const leftPalm = keypoints.find((k) => k.part === 'leftPalm') as BodyKeypoint;
   const leftWrist = keypoints.find((k) => k.part === 'leftWrist') as BodyKeypoint;
@@ -146,7 +146,7 @@ async function detectLandmarks(input: Tensor, config: Config, outputSize: [numbe
   const points = await t.ld.data();
   const distances = await t.world.data();
   Object.keys(t).forEach((tensor) => tf.dispose(t[tensor])); // dont need tensors after this
-  const keypointsRelative: Array<BodyKeypoint> = [];
+  const keypointsRelative: BodyKeypoint[] = [];
   const depth = 5; // each points has x,y,z,visibility,presence
   for (let i = 0; i < points.length / depth; i++) {
     const score = sigmoid(points[depth * i + 3]);
@@ -159,12 +159,12 @@
   }
   if (poseScore < (config.body.minConfidence || 0)) return null;
   fixKeypoints(keypointsRelative);
-  const keypoints: Array<BodyKeypoint> = rescaleKeypoints(keypointsRelative, outputSize); // keypoints were relative to input image which is padded
+  const keypoints: BodyKeypoint[] = rescaleKeypoints(keypointsRelative, outputSize); // keypoints were relative to input image which is padded
   const kpts = keypoints.map((k) => k.position);
   const boxes = box.calc(kpts, [outputSize[0], outputSize[1]]); // now find boxes based on rescaled keypoints
   const annotations: Record<BodyAnnotation, Point[][]> = {} as Record<BodyAnnotation, Point[][]>;
   for (const [name, indexes] of Object.entries(coords.connected)) {
-    const pt: Array<Point[]> = [];
+    const pt: Point[][] = [];
     for (let i = 0; i < indexes.length - 1; i++) {
       const pt0 = keypoints.find((kpt) => kpt.part === indexes[i]);
       const pt1 = keypoints.find((kpt) => kpt.part === indexes[i + 1]);
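The recurring `Array<BodyKeypoint>` to `BodyKeypoint[]` edits in this and the following source files come from the `@typescript-eslint/array-type` rule, which standardizes on the shorthand syntax; the two spellings denote the same type. A one-line illustration:

  const a: Array<number> = [1, 2]; // flagged when array-type is set to 'array'
  const b: number[] = [1, 2];      // same type, shorthand syntax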
@@ -1,6 +1,6 @@
 /* eslint-disable no-multi-spaces */

-export const kpt: Array<string> = [
+export const kpt: string[] = [
   'nose', // 0
   'leftEyeInside', // 1
   'leftEye', // 2
@@ -11,7 +11,7 @@ const numLayers = 5;
 const strides = [8, 16, 32, 32, 32];

 export async function createAnchors() {
-  const anchors: Array<{ x: number, y: number }> = [];
+  const anchors: { x: number, y: number }[] = [];
   let layerId = 0;
   while (layerId < numLayers) {
     let anchorCount = 0;
@@ -59,10 +59,10 @@ export async function decode(boxesTensor: Tensor, logitsTensor: Tensor, config:
   t.boxes = decodeBoxes(boxesTensor, anchorTensor);
   t.scores = tf.sigmoid(logitsTensor);
   t.argmax = tf.argMax(t.scores);
-  const i = (await t.argmax.data())[0] as number;
+  const i = (await t.argmax.data())[0];
   const scores = await t.scores.data();
-  const detected: Array<{ box: Box, boxRaw: Box, score: number }> = [];
-  const minScore = (config.body['detector'] && config.body['detector']['minConfidence']) ? config.body['detector']['minConfidence'] : 0;
+  const detected: { box: Box, boxRaw: Box, score: number }[] = [];
+  const minScore = (config.body['detector'] && config.body['detector'].minConfidence) ? config.body['detector'].minConfidence : 0;
   if (scores[i] >= minScore) {
     const boxes = await t.boxes.array();
     const boxRaw: Box = boxes[i];
@@ -45,10 +45,9 @@ async function max2d(inputs, minScore): Promise<[number, number, number]> {
     const y: number = (await div.data())[0];
     tf.dispose([reshaped, max, coordinates, mod, div]);
     return [x, y, newScore];
-  } else {
-    tf.dispose([reshaped, max]);
-    return [0, 0, newScore];
   }
+  tf.dispose([reshaped, max]);
+  return [0, 0, newScore];
 }

 export async function predict(image: Tensor, config: Config): Promise<BodyResult[]> {
@@ -84,7 +83,7 @@ export async function predict(image: Tensor, config: Config): Promise<BodyResult
   for (let id = 0; id < stack.length; id++) {
     // actual processing to get coordinates and score
     const [x, y, partScore] = await max2d(stack[id], config.body.minConfidence);
-    if (partScore > (config.body?.minConfidence || 0)) {
+    if (partScore > (config.body.minConfidence || 0)) {
       cache.keypoints.push({
         score: Math.round(100 * partScore) / 100,
         part: coords.kpt[id] as BodyLandmark,
@@ -119,7 +118,7 @@ export async function predict(image: Tensor, config: Config): Promise<BodyResult
       Math.max(...yRaw) - Math.min(...yRaw),
     ];
     for (const [name, indexes] of Object.entries(coords.connected)) {
-      const pt: Array<Point[]> = [];
+      const pt: Point[][] = [];
       for (let i = 0; i < indexes.length - 1; i++) {
         const pt0 = cache.keypoints.find((kpt) => kpt.part === indexes[i]);
         const pt1 = cache.keypoints.find((kpt) => kpt.part === indexes[i + 1]);
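Dropping the `else` after a `return` follows the `no-else-return` style: once the happy path has returned, the remaining statements can sit at the function's top level, saving a nesting level. Schematically (hypothetical f and g):

  function pick(x) {
    if (x) return f(x); // early return
    return g();         // no else needed once the other branch has returned
  }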
@@ -1,4 +1,4 @@
-export const kpt: Array<string> = [
+export const kpt: string[] = [
   'head',
   'neck',
   'rightShoulder',
@ -22,8 +22,8 @@ let skipped = Number.MAX_SAFE_INTEGER;
|
||||||
// const boxExpandFact = 1.5; // increase to 150%
|
// const boxExpandFact = 1.5; // increase to 150%
|
||||||
|
|
||||||
const cache: {
|
const cache: {
|
||||||
boxes: Array<Box>, // unused
|
boxes: Box[], // unused
|
||||||
bodies: Array<BodyResult>;
|
bodies: BodyResult[];
|
||||||
last: number,
|
last: number,
|
||||||
} = {
|
} = {
|
||||||
boxes: [],
|
boxes: [],
|
||||||
|
@ -44,7 +44,7 @@ export async function load(config: Config): Promise<GraphModel> {
|
||||||
|
|
||||||
async function parseSinglePose(res, config, image) {
|
async function parseSinglePose(res, config, image) {
|
||||||
const kpt = res[0][0];
|
const kpt = res[0][0];
|
||||||
const keypoints: Array<BodyKeypoint> = [];
|
const keypoints: BodyKeypoint[] = [];
|
||||||
let score = 0;
|
let score = 0;
|
||||||
for (let id = 0; id < kpt.length; id++) {
|
for (let id = 0; id < kpt.length; id++) {
|
||||||
score = kpt[id][2];
|
score = kpt[id][2];
|
||||||
|
@@ -62,11 +62,11 @@ async function parseSinglePose(res, config, image) {
     }
   }
   score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
-  const bodies: Array<BodyResult> = [];
+  const bodies: BodyResult[] = [];
   const newBox = box.calc(keypoints.map((pt) => pt.position), [image.shape[2], image.shape[1]]);
   const annotations: Record<string, Point[][]> = {};
   for (const [name, indexes] of Object.entries(coords.connected)) {
-    const pt: Array<Point[]> = [];
+    const pt: Point[][] = [];
     for (let i = 0; i < indexes.length - 1; i++) {
       const pt0 = keypoints.find((kp) => kp.part === indexes[i]);
       const pt1 = keypoints.find((kp) => kp.part === indexes[i + 1]);
@@ -81,12 +81,12 @@ async function parseSinglePose(res, config, image) {
 }

 async function parseMultiPose(res, config, image) {
-  const bodies: Array<BodyResult> = [];
+  const bodies: BodyResult[] = [];
   for (let id = 0; id < res[0].length; id++) {
     const kpt = res[0][id];
     const totalScore = Math.round(100 * kpt[51 + 4]) / 100;
     if (totalScore > config.body.minConfidence) {
-      const keypoints: Array<BodyKeypoint> = [];
+      const keypoints: BodyKeypoint[] = [];
       for (let i = 0; i < 17; i++) {
         const score = kpt[3 * i + 2];
         if (score > config.body.minConfidence) {
@@ -105,7 +105,7 @@ async function parseMultiPose(res, config, image) {
       // const box: Box = [Math.trunc(boxRaw[0] * (image.shape[2] || 0)), Math.trunc(boxRaw[1] * (image.shape[1] || 0)), Math.trunc(boxRaw[2] * (image.shape[2] || 0)), Math.trunc(boxRaw[3] * (image.shape[1] || 0))];
       const annotations: Record<BodyAnnotation, Point[][]> = {} as Record<BodyAnnotation, Point[][]>;
       for (const [name, indexes] of Object.entries(coords.connected)) {
-        const pt: Array<Point[]> = [];
+        const pt: Point[][] = [];
         for (let i = 0; i < indexes.length - 1; i++) {
           const pt0 = keypoints.find((kp) => kp.part === indexes[i]);
           const pt1 = keypoints.find((kp) => kp.part === indexes[i + 1]);
@@ -124,7 +124,7 @@ async function parseMultiPose(res, config, image) {
 }

 export async function predict(input: Tensor, config: Config): Promise<BodyResult[]> {
-  if (!model || !model?.inputs[0].shape) return []; // something is wrong with the model
+  if (!model || !model.inputs[0].shape) return []; // something is wrong with the model
   if (!config.skipAllowed) cache.boxes.length = 0; // allowed to use cache or not
   skipped++; // increment skip frames
   const skipTime = (config.body.skipTime || 0) > (now() - cache.last);
@@ -1,4 +1,4 @@
-export const kpt: Array<string> = [ // used to create part labels
+export const kpt: string[] = [ // used to create part labels
   'nose',
   'leftEye',
   'rightEye',
@@ -18,7 +18,7 @@ export const kpt: Array<string> = [ // used to create part labels
   'rightAnkle',
 ];

-export const horizontal: Array<string[]> = [ // used to fix left vs right
+export const horizontal: string[][] = [ // used to fix left vs right
   ['leftEye', 'rightEye'],
   ['leftEar', 'rightEar'],
   ['leftShoulder', 'rightShoulder'],
@@ -29,14 +29,14 @@ export const horizontal: Array<string[]> = [ // used to fix left vs right
   ['leftAnkle', 'rightAnkle'],
 ];

-export const vertical: Array<string[]> = [ // used to remove unlikely keypoint positions
+export const vertical: string[][] = [ // used to remove unlikely keypoint positions
   ['leftKnee', 'leftShoulder'],
   ['rightKnee', 'rightShoulder'],
   ['leftAnkle', 'leftKnee'],
   ['rightAnkle', 'rightKnee'],
 ];

-export const relative: Array<string[][]> = [ // used to match relative body parts
+export const relative: string[][][] = [ // used to match relative body parts
   [['leftHip', 'rightHip'], ['leftShoulder', 'rightShoulder']],
   [['leftElbow', 'rightElbow'], ['leftShoulder', 'rightShoulder']],
 ];
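These pair tables drive the left/right sanity fixes referenced by `bodyParts()` in the next file (the model "sometimes mixes up left vs right"). A hedged sketch of how the `horizontal` pairs could be applied - the swap condition and keypoint shape here are illustrative assumptions, not the library's exact implementation:

  interface KP { part: string; position: [number, number] }
  function fixHorizontal(keypoints: KP[], pairs: string[][]) {
    for (const [left, right] of pairs) {
      const l = keypoints.find((kp) => kp.part === left);
      const r = keypoints.find((kp) => kp.part === right);
      // if the joint labeled left sits to the right of its counterpart, swap the labels
      if (l && r && l.position[0] > r.position[0]) [l.part, r.part] = [r.part, l.part];
    }
  }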
@@ -7,7 +7,7 @@ import type { Tensor } from '../tfjs/types';
 const maxJitter = 0.005; // default allowed jitter is within 0.5%

 const cache: {
-  keypoints: Array<BodyKeypoint>,
+  keypoints: BodyKeypoint[],
   padding: [number, number][];
 } = {
   keypoints: [],
@@ -57,7 +57,7 @@ export function bodyParts(body: BodyResult) { // model sometimes mixes up left vs right
   }
 }

-export function jitter(keypoints: Array<BodyKeypoint>): Array<BodyKeypoint> {
+export function jitter(keypoints: BodyKeypoint[]): BodyKeypoint[] {
   for (let i = 0; i < keypoints.length; i++) {
     if (keypoints[i] && cache.keypoints[i]) {
       const diff = [Math.abs(keypoints[i].positionRaw[0] - cache.keypoints[i].positionRaw[0]), Math.abs(keypoints[i].positionRaw[1] - cache.keypoints[i].positionRaw[1])];
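`maxJitter` plus the keypoint cache implement temporal smoothing: `jitter()` compares each keypoint's raw (0..1) position against the previous frame and, when movement stays under 0.5% on both axes, holds the cached position so the rendered skeleton does not tremble. The core test, reduced to a self-contained sketch consistent with the `diff` computation visible in the hunk (the accept/reject branch itself is an assumption):

  interface Pt { positionRaw: [number, number] }
  const withinJitter = (curr: Pt, prev: Pt, maxJitter = 0.005): boolean =>
    Math.abs(curr.positionRaw[0] - prev.positionRaw[0]) < maxJitter
    && Math.abs(curr.positionRaw[1] - prev.positionRaw[1]) < maxJitter; // true: keep the cached keypoint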
@@ -134,7 +134,7 @@ function getInstanceScore(existingPoses, keypoints) {
 }

 export function decode(offsets, scores, displacementsFwd, displacementsBwd, maxDetected, minConfidence) {
-  const poses: Array<{ keypoints, box: Box, score: number }> = [];
+  const poses: { keypoints, box: Box, score: number }[] = [];
   const queue = buildPartWithScoreQueue(minConfidence, scores);
   // Generate at most maxDetected object instances per image in decreasing root part score order.
   while (poses.length < maxDetected && !queue.empty()) {
@@ -163,7 +163,7 @@ export async function predict(input: Tensor, config: Config): Promise<BodyResult[]> {
     if (!model.inputs[0].shape) return [];
     const resized = tf.image.resizeBilinear(input, [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
     const normalized = tf.sub(tf.div(tf.cast(resized, 'float32'), 127.5), 1.0);
-    const results: Array<Tensor> = model.execute(normalized, poseNetOutputs) as Array<Tensor>;
+    const results: Tensor[] = model.execute(normalized, poseNetOutputs) as Tensor[];
     const results3d = results.map((y) => tf.squeeze(y, [0]));
     results3d[1] = tf.sigmoid(results3d[1]); // apply sigmoid on scores
     return results3d;
@@ -174,7 +174,7 @@ export async function predict(input: Tensor, config: Config): Promise<BodyResult[]> {

   const decoded = await decode(buffers[0], buffers[1], buffers[2], buffers[3], config.body.maxDetected, config.body.minConfidence);
   if (!model.inputs[0].shape) return [];
-  const scaled = utils.scalePoses(decoded, [input.shape[1], input.shape[2]], [model.inputs[0].shape[2], model.inputs[0].shape[1]]) as BodyResult[];
+  const scaled = utils.scalePoses(decoded, [input.shape[1], input.shape[2]], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
   return scaled;
 }

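The normalization in the first predict hunk maps 8-bit pixel values onto the [-1, 1] range the PoseNet graph expects: dividing by 127.5 and subtracting 1 sends 0 to -1 and 255 to 1. The same arithmetic without tensors:

  const normalize = (x: number): number => x / 127.5 - 1.0;
  normalize(0);   // -1
  normalize(255); // 1, since 255 / 127.5 = 2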
@@ -68,7 +68,7 @@ export function getBoundingBox(keypoints): [number, number, number, number] {
   return [coord.minX, coord.minY, coord.maxX - coord.minX, coord.maxY - coord.minY];
 }

-export function scalePoses(poses, [height, width], [inputResolutionHeight, inputResolutionWidth]): Array<BodyResult> {
+export function scalePoses(poses, [height, width], [inputResolutionHeight, inputResolutionWidth]): BodyResult[] {
   const scaleY = height / inputResolutionHeight;
   const scaleX = width / inputResolutionWidth;
   const scalePose = (pose, i): BodyResult => ({
@@ -90,7 +90,7 @@ export function scalePoses(poses, [height, width], [inputResolutionHeight, inputResolutionWidth]): Array<BodyResult> {

 // algorithm based on Coursera Lecture from Algorithms, Part 1: https://www.coursera.org/learn/algorithms-part1/lecture/ZjoSM/heapsort
 export class MaxHeap {
-  priorityQueue: Array<unknown>; // don't touch
+  priorityQueue: unknown[]; // don't touch
   numberOfElements: number;
   getElementValue: unknown; // function call

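`MaxHeap` is the priority queue behind `buildPartWithScoreQueue()` in `decode()` above: candidate root keypoints are popped in decreasing score order until `maxDetected` poses are built. A self-contained sketch of the classic swim/sink operations from the cited heapsort lecture, independent of the class fields shown in the hunk:

  class SimpleMaxHeap<T> {
    private items: T[] = [];
    constructor(private value: (item: T) => number) {}
    push(item: T) { // append, then swim up while larger than parent
      this.items.push(item);
      let i = this.items.length - 1;
      while (i > 0) {
        const parent = Math.floor((i - 1) / 2);
        if (this.value(this.items[i]) <= this.value(this.items[parent])) break;
        [this.items[i], this.items[parent]] = [this.items[parent], this.items[i]];
        i = parent;
      }
    }
    pop(): T | undefined { // move last element to root, then sink it down
      if (this.items.length === 0) return undefined;
      const top = this.items[0];
      const last = this.items.pop() as T;
      if (this.items.length > 0) {
        this.items[0] = last;
        let i = 0;
        for (;;) {
          const l = 2 * i + 1;
          const r = 2 * i + 2;
          let largest = i;
          if (l < this.items.length && this.value(this.items[l]) > this.value(this.items[largest])) largest = l;
          if (r < this.items.length && this.value(this.items[r]) > this.value(this.items[largest])) largest = r;
          if (largest === i) break;
          [this.items[i], this.items[largest]] = [this.items[largest], this.items[i]];
          i = largest;
        }
      }
      return top;
    }
  }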
@@ -1,4 +1,3 @@
-/* eslint-disable indent */
 /* eslint-disable no-multi-spaces */

 /** Generic config type inherited by all module types */
@@ -5,7 +5,7 @@ import type { BodyResult } from '../result';
 import type { AnyCanvas, DrawOptions } from '../exports';

 /** draw detected bodies */
-export async function body(inCanvas: AnyCanvas, result: Array<BodyResult>, drawOptions?: Partial<DrawOptions>) {
+export async function body(inCanvas: AnyCanvas, result: BodyResult[], drawOptions?: Partial<DrawOptions>) {
   const localOptions = mergeDeep(options, drawOptions);
   if (!result || !inCanvas) return;
   const ctx = getCanvasContext(inCanvas);
@@ -16,7 +16,7 @@ export async function body(inCanvas: AnyCanvas, result: Array<BodyResult>, drawOptions?: Partial<DrawOptions>) {
     ctx.fillStyle = localOptions.color;
     ctx.lineWidth = localOptions.lineWidth;
     ctx.font = localOptions.font;
-    if (localOptions.drawBoxes && result[i].box && result[i].box?.length === 4) {
+    if (localOptions.drawBoxes && result[i].box && result[i].box.length === 4) {
       rect(ctx, result[i].box[0], result[i].box[1], result[i].box[2], result[i].box[3], localOptions);
       if (localOptions.drawLabels) {
         if (localOptions.shadowColor && localOptions.shadowColor !== '') {
@@ -24,7 +24,7 @@ export { object } from './object';
 export { gesture } from './gesture';

 /** draw combined person results instead of individual detection result objects */
-export async function person(inCanvas: AnyCanvas, result: Array<PersonResult>, drawOptions?: Partial<DrawOptions>) {
+export async function person(inCanvas: AnyCanvas, result: PersonResult[], drawOptions?: Partial<DrawOptions>) {
   const localOptions = mergeDeep(options, drawOptions);
   if (!result || !inCanvas) return;
   const ctx = getCanvasContext(inCanvas);
@@ -44,24 +44,24 @@ function drawLabels(f: FaceResult, ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D) {

 function drawIrisElipse(f: FaceResult, ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D) {
   // iris: array[center, left, top, right, bottom]
-  if (f.annotations && f.annotations['leftEyeIris'] && f.annotations['leftEyeIris'][0]) {
+  if (f.annotations && f.annotations.leftEyeIris && f.annotations.leftEyeIris[0]) {
     ctx.strokeStyle = opt.useDepth ? 'rgba(255, 200, 255, 0.3)' : opt.color;
     ctx.beginPath();
-    const sizeX = Math.abs(f.annotations['leftEyeIris'][3][0] - f.annotations['leftEyeIris'][1][0]) / 2;
-    const sizeY = Math.abs(f.annotations['leftEyeIris'][4][1] - f.annotations['leftEyeIris'][2][1]) / 2;
-    ctx.ellipse(f.annotations['leftEyeIris'][0][0], f.annotations['leftEyeIris'][0][1], sizeX, sizeY, 0, 0, 2 * Math.PI);
+    const sizeX = Math.abs(f.annotations.leftEyeIris[3][0] - f.annotations.leftEyeIris[1][0]) / 2;
+    const sizeY = Math.abs(f.annotations.leftEyeIris[4][1] - f.annotations.leftEyeIris[2][1]) / 2;
+    ctx.ellipse(f.annotations.leftEyeIris[0][0], f.annotations.leftEyeIris[0][1], sizeX, sizeY, 0, 0, 2 * Math.PI);
     ctx.stroke();
     if (opt.fillPolygons) {
       ctx.fillStyle = opt.useDepth ? 'rgba(255, 255, 200, 0.3)' : opt.color;
       ctx.fill();
     }
   }
-  if (f.annotations && f.annotations['rightEyeIris'] && f.annotations['rightEyeIris'][0]) {
+  if (f.annotations && f.annotations.rightEyeIris && f.annotations.rightEyeIris[0]) {
     ctx.strokeStyle = opt.useDepth ? 'rgba(255, 200, 255, 0.3)' : opt.color;
     ctx.beginPath();
-    const sizeX = Math.abs(f.annotations['rightEyeIris'][3][0] - f.annotations['rightEyeIris'][1][0]) / 2;
-    const sizeY = Math.abs(f.annotations['rightEyeIris'][4][1] - f.annotations['rightEyeIris'][2][1]) / 2;
-    ctx.ellipse(f.annotations['rightEyeIris'][0][0], f.annotations['rightEyeIris'][0][1], sizeX, sizeY, 0, 0, 2 * Math.PI);
+    const sizeX = Math.abs(f.annotations.rightEyeIris[3][0] - f.annotations.rightEyeIris[1][0]) / 2;
+    const sizeY = Math.abs(f.annotations.rightEyeIris[4][1] - f.annotations.rightEyeIris[2][1]) / 2;
+    ctx.ellipse(f.annotations.rightEyeIris[0][0], f.annotations.rightEyeIris[0][1], sizeX, sizeY, 0, 0, 2 * Math.PI);
     ctx.stroke();
     if (opt.fillPolygons) {
       ctx.fillStyle = opt.useDepth ? 'rgba(255, 255, 200, 0.3)' : opt.color;
@@ -95,19 +95,19 @@ function drawGazeSpheres(f: FaceResult, ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D) {
 }

 function drawGazeArrows(f: FaceResult, ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D) {
-  if (opt.drawGaze && f.rotation?.gaze?.strength && f.rotation?.gaze?.bearing && f.annotations['leftEyeIris'] && f.annotations['rightEyeIris'] && f.annotations['leftEyeIris'][0] && f.annotations['rightEyeIris'][0]) {
+  if (opt.drawGaze && f.rotation?.gaze.strength && f.rotation.gaze.bearing && f.annotations.leftEyeIris && f.annotations.rightEyeIris && f.annotations.leftEyeIris[0] && f.annotations.rightEyeIris[0]) {
     ctx.strokeStyle = 'pink';
     ctx.fillStyle = 'pink';
     const leftGaze = [
-      f.annotations['leftEyeIris'][0][0] + (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]),
-      f.annotations['leftEyeIris'][0][1] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]),
+      f.annotations.leftEyeIris[0][0] + (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]),
+      f.annotations.leftEyeIris[0][1] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]),
     ];
-    arrow(ctx, [f.annotations['leftEyeIris'][0][0], f.annotations['leftEyeIris'][0][1]], [leftGaze[0], leftGaze[1]], 4);
+    arrow(ctx, [f.annotations.leftEyeIris[0][0], f.annotations.leftEyeIris[0][1]], [leftGaze[0], leftGaze[1]], 4);
     const rightGaze = [
-      f.annotations['rightEyeIris'][0][0] + (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]),
-      f.annotations['rightEyeIris'][0][1] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]),
+      f.annotations.rightEyeIris[0][0] + (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]),
+      f.annotations.rightEyeIris[0][1] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]),
     ];
-    arrow(ctx, [f.annotations['rightEyeIris'][0][0], f.annotations['rightEyeIris'][0][1]], [rightGaze[0], rightGaze[1]], 4);
+    arrow(ctx, [f.annotations.rightEyeIris[0][0], f.annotations.rightEyeIris[0][1]], [rightGaze[0], rightGaze[1]], 4);
   }
 }

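The arrow endpoints are a polar-to-cartesian conversion: gaze is stored as a bearing in radians plus a strength expressed as a fraction of the face box, and sin/cos project it into x/y offsets from the iris center scaled by the box dimensions (box[3] for x, box[2] for y, as in the hunk above). The same arithmetic in isolation, with the field layout assumed:

  type Box = [number, number, number, number]; // x, y, width, height
  const gazeEndpoint = (center: [number, number], bearing: number, strength: number, box: Box): [number, number] => [
    center[0] + Math.sin(bearing) * strength * box[3],
    center[1] + Math.cos(bearing) * strength * box[2],
  ];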
|
@ -149,7 +149,7 @@ function drawFaceBoxes(f: FaceResult, ctx) {
|
||||||
}
|
}
|
||||||
|
|
||||||
/** draw detected faces */
|
/** draw detected faces */
|
||||||
export async function face(inCanvas: AnyCanvas, result: Array<FaceResult>, drawOptions?: Partial<DrawOptions>) {
|
export async function face(inCanvas: AnyCanvas, result: FaceResult[], drawOptions?: Partial<DrawOptions>) {
|
||||||
opt = mergeDeep(options, drawOptions);
|
opt = mergeDeep(options, drawOptions);
|
||||||
if (!result || !inCanvas) return;
|
if (!result || !inCanvas) return;
|
||||||
const ctx = getCanvasContext(inCanvas);
|
const ctx = getCanvasContext(inCanvas);
|
||||||
@@ -5,7 +5,7 @@ import type { GestureResult } from '../result';
 import type { AnyCanvas, DrawOptions } from '../exports';

 /** draw detected gestures */
-export async function gesture(inCanvas: AnyCanvas, result: Array<GestureResult>, drawOptions?: Partial<DrawOptions>) {
+export async function gesture(inCanvas: AnyCanvas, result: GestureResult[], drawOptions?: Partial<DrawOptions>) {
   const localOptions = mergeDeep(options, drawOptions);
   if (!result || !inCanvas) return;
   if (localOptions.drawGestures) {
@@ -5,7 +5,7 @@ import type { HandResult } from '../result';
 import type { AnyCanvas, DrawOptions, Point } from '../exports';

 /** draw detected hands */
-export async function hand(inCanvas: AnyCanvas, result: Array<HandResult>, drawOptions?: Partial<DrawOptions>) {
+export async function hand(inCanvas: AnyCanvas, result: HandResult[], drawOptions?: Partial<DrawOptions>) {
   const localOptions = mergeDeep(options, drawOptions);
   if (!result || !inCanvas) return;
   const ctx = getCanvasContext(inCanvas);
@@ -36,22 +36,22 @@ export async function hand(inCanvas: AnyCanvas, result: Array<HandResult>, drawOptions?: Partial<DrawOptions>) {
       }
     }
     if (localOptions.drawLabels && h.annotations) {
-      const addHandLabel = (part: Array<Point>, title: string) => {
+      const addHandLabel = (part: Point[], title: string) => {
        if (!part || part.length === 0 || !part[0]) return;
        const z = part[part.length - 1][2] || -256;
        ctx.fillStyle = colorDepth(z, localOptions);
        ctx.fillText(title, part[part.length - 1][0] + 4, part[part.length - 1][1] + 4);
      };
      ctx.font = localOptions.font;
-      addHandLabel(h.annotations['index'], 'index');
-      addHandLabel(h.annotations['middle'], 'middle');
-      addHandLabel(h.annotations['ring'], 'ring');
-      addHandLabel(h.annotations['pinky'], 'pinky');
-      addHandLabel(h.annotations['thumb'], 'thumb');
-      addHandLabel(h.annotations['palm'], 'palm');
+      addHandLabel(h.annotations.index, 'index');
+      addHandLabel(h.annotations.middle, 'middle');
+      addHandLabel(h.annotations.ring, 'ring');
+      addHandLabel(h.annotations.pinky, 'pinky');
+      addHandLabel(h.annotations.thumb, 'thumb');
+      addHandLabel(h.annotations.palm, 'palm');
     }
     if (localOptions.drawPolygons && h.annotations) {
-      const addHandLine = (part: Array<Point>) => {
+      const addHandLine = (part: Point[]) => {
        if (!part || part.length === 0 || !part[0]) return;
        for (let i = 0; i < part.length; i++) {
          ctx.beginPath();
@@ -63,11 +63,11 @@ export async function hand(inCanvas: AnyCanvas, result: Array<HandResult>, drawOptions?: Partial<DrawOptions>) {
        }
      };
      ctx.lineWidth = localOptions.lineWidth;
-      addHandLine(h.annotations['index']);
-      addHandLine(h.annotations['middle']);
-      addHandLine(h.annotations['ring']);
-      addHandLine(h.annotations['pinky']);
-      addHandLine(h.annotations['thumb']);
+      addHandLine(h.annotations.index);
+      addHandLine(h.annotations.middle);
+      addHandLine(h.annotations.ring);
+      addHandLine(h.annotations.pinky);
+      addHandLine(h.annotations.thumb);
      // addPart(h.annotations.palm);
     }
   }
@@ -5,7 +5,7 @@ import type { ObjectResult } from '../result';
 import type { AnyCanvas, DrawOptions } from '../exports';

 /** draw detected objects */
-export async function object(inCanvas: AnyCanvas, result: Array<ObjectResult>, drawOptions?: Partial<DrawOptions>) {
+export async function object(inCanvas: AnyCanvas, result: ObjectResult[], drawOptions?: Partial<DrawOptions>) {
   const localOptions = mergeDeep(options, drawOptions);
   if (!result || !inCanvas) return;
   const ctx = getCanvasContext(inCanvas);
@@ -1,7 +1,7 @@
 /** Draw Options
  * - Accessed via `human.draw.options` or provided per each draw method as the drawOptions optional parameter
  */
-export type DrawOptions = {
+export interface DrawOptions {
   /** draw line color */
   color: string,
   /** alpha value used for lines */
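Switching `DrawOptions` from a type alias to an interface is behavior-neutral for a plain object shape; interfaces additionally support declaration merging and `extends`, and a consistency lint rule (likely `@typescript-eslint/consistent-type-definitions`, though that is an assumption - the rule list above is truncated) favors one form across the codebase. The two spellings side by side:

  type AsType = { color: string };
  interface AsInterface { color: string }
  const a: AsType = { color: 'white' };
  const b: AsInterface = a; // structurally identical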
@@ -4,7 +4,7 @@ type Vector = [number, number, number];

 const calculateGaze = (face: FaceResult): { bearing: number, strength: number } => {
   const radians = (pt1: Point, pt2: Point) => Math.atan2(pt1[1] - pt2[1], pt1[0] - pt2[0]); // function to calculate angle between any two points
-  if (!face.annotations['rightEyeIris'] || !face.annotations['leftEyeIris']) return { bearing: 0, strength: 0 };
+  if (!face.annotations.rightEyeIris || !face.annotations.leftEyeIris) return { bearing: 0, strength: 0 };

   const offsetIris = [0, -0.1]; // iris center may not align with average of eye extremes
   const eyeRatio = 1; // factor to normalize changes x vs y
@@ -54,8 +54,7 @@ export const calculateFaceAngle = (face: FaceResult, imageSize: [number, number]
   };
   // 3x3 rotation matrix to Euler angles based on https://www.geometrictools.com/Documentation/EulerAngles.pdf
   const rotationMatrixToEulerAngle = (r: number[]): { pitch: number, yaw: number, roll: number } => {
-    // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
-    const [r00, _r01, _r02, r10, r11, r12, r20, r21, r22] = r;
+    const [r00, _r01, _r02, r10, r11, r12, r20, r21, r22] = r; // eslint-disable-line @typescript-eslint/no-unused-vars
     let thetaX: number;
     let thetaY: number;
     let thetaZ: number;
@@ -74,9 +73,9 @@ export const calculateFaceAngle = (face: FaceResult, imageSize: [number, number]
       thetaY = Math.atan2(r21, r22);
       thetaX = 0;
     }
-    if (isNaN(thetaX)) thetaX = 0;
-    if (isNaN(thetaY)) thetaY = 0;
-    if (isNaN(thetaZ)) thetaZ = 0;
+    if (Number.isNaN(thetaX)) thetaX = 0;
+    if (Number.isNaN(thetaY)) thetaY = 0;
+    if (Number.isNaN(thetaZ)) thetaZ = 0;
     return { pitch: 2 * -thetaX, yaw: 2 * -thetaY, roll: 2 * -thetaZ };
   };

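Unlike the global `isNaN`, `Number.isNaN` performs no numeric coercion and is true only for the actual NaN value, so it cannot misfire on non-number inputs; for the already-numeric theta values here the two behave identically and the change is lint-driven. The difference in one snippet:

  const s = 'abc' as unknown as number; // simulate an untyped value sneaking in
  isNaN(s);            // true - coerces the string to NaN first
  Number.isNaN(s);     // false - not the NaN value itself
  Number.isNaN(0 / 0); // true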
@@ -99,18 +98,18 @@ export const calculateFaceAngle = (face: FaceResult, imageSize: [number, number]
   // top, bottom, left, right
   const pts: Point[] = [mesh[10], mesh[152], mesh[234], mesh[454]].map((pt) => [pt[0] * imageSize[0] / size, pt[1] * imageSize[1] / size, pt[2]] as Point); // make the xyz coordinates proportional, independent of the image/box size

-  const y_axis = normalize(subVectors(pts[1] as Vector, pts[0] as Vector));
-  let x_axis = normalize(subVectors(pts[3] as Vector, pts[2] as Vector));
-  const z_axis = normalize(crossVectors(x_axis, y_axis));
-  // adjust x_axis to make sure that all axes are perpendicular to each other
-  x_axis = crossVectors(y_axis, z_axis);
+  const yAxis = normalize(subVectors(pts[1] as Vector, pts[0] as Vector));
+  let xAxis = normalize(subVectors(pts[3] as Vector, pts[2] as Vector));
+  const zAxis = normalize(crossVectors(xAxis, yAxis));
+  // adjust xAxis to make sure that all axes are perpendicular to each other
+  xAxis = crossVectors(yAxis, zAxis);

   // Rotation Matrix from Axis Vectors - http://renderdan.blogspot.com/2006/05/rotation-matrix-from-axis-vectors.html
   // 3x3 rotation matrix is flatten to array in row-major order. Note that the rotation represented by this matrix is inverted.
   const matrix: [number, number, number, number, number, number, number, number, number] = [
-    x_axis[0], x_axis[1], x_axis[2],
-    y_axis[0], y_axis[1], y_axis[2],
-    z_axis[0], z_axis[1], z_axis[2],
+    xAxis[0], xAxis[1], xAxis[2],
+    yAxis[0], yAxis[1], yAxis[2],
+    zAxis[0], zAxis[1], zAxis[2],
   ];
   const angle = rotationMatrixToEulerAngle(matrix);
   // const angle = meshToEulerAngle(mesh);
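Beyond the snake_case to camelCase rename, this hunk builds an orthonormal face frame: yAxis spans forehead to chin (mesh points 10 and 152), xAxis spans the two sides of the face (points 234 and 454), zAxis is their cross product, and xAxis is re-derived as yAxis x zAxis so all three are mutually perpendicular; stacking the unit axes row-major gives the 3x3 rotation matrix passed to `rotationMatrixToEulerAngle`. Hedged sketches of the vector helpers this relies on - the library's own `subVectors`, `crossVectors`, and `normalize` are assumed to follow the conventional definitions:

  type Vec3 = [number, number, number];
  const subVectors = (a: Vec3, b: Vec3): Vec3 => [a[0] - b[0], a[1] - b[1], a[2] - b[2]];
  const crossVectors = (a: Vec3, b: Vec3): Vec3 => [
    a[1] * b[2] - a[2] * b[1],
    a[2] * b[0] - a[0] * b[2],
    a[0] * b[1] - a[1] * b[0],
  ];
  const normalize = (v: Vec3): Vec3 => {
    const len = Math.sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]) || 1; // guard against zero-length vectors
    return [v[0] / len, v[1] / len, v[2] / len];
  };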
@@ -10,7 +10,7 @@ import { loadModel } from '../tfjs/load';
 import { env } from '../util/env';

 let model: GraphModel | null;
-const cached: Array<number> = [];
+const cached: number[] = [];
 let skipped = Number.MAX_SAFE_INTEGER;
 let lastCount = 0;
 let lastTime = 0;
@@ -20,7 +20,7 @@ let anchors: Tensor | null = null;
 let inputSize = 0;
 let inputSizeT: Tensor | null = null;

-type DetectBox = { startPoint: Point, endPoint: Point, landmarks: Array<Point>, confidence: number };
+interface DetectBox { startPoint: Point, endPoint: Point, landmarks: Point[], confidence: number }

 export const size = () => inputSize;

@@ -77,7 +77,7 @@ export async function getBoxes(inputImage: Tensor, config: Config) {
   t.scores = tf.squeeze(t.sigmoid);
   t.nms = await tf.image.nonMaxSuppressionAsync(t.boxes, t.scores, (config.face.detector?.maxDetected || 0), (config.face.detector?.iouThreshold || 0), (config.face.detector?.minConfidence || 0));
   const nms = await t.nms.array() as number[];
-  const boxes: Array<DetectBox> = [];
+  const boxes: DetectBox[] = [];
   const scores = await t.scores.data();
   for (let i = 0; i < nms.length; i++) {
     const confidence = scores[nms[i]];
@@ -1,7 +1,7 @@
 // @tensorflow/tfjs-models/face-landmark-detection/src/constants.ts
 // https://github.com/google/mediapipe/mediapipe/python/solutions/face_mesh_connections.py

-type PairArray = Array<[number, number]>;
+type PairArray = [number, number][];

 const LIPS_CONNECTIONS: PairArray = [
   [61, 146], [146, 91], [91, 181], [181, 84], [84, 17], [17, 314], [314, 405], [405, 321], [321, 375], [375, 291], [61, 185], [185, 40], [40, 39], [39, 37], [37, 0], [0, 267], [267, 269], [269, 270], [270, 409], [409, 291],
@@ -187,7 +187,7 @@ export const MEDIAPIPE_FACE_MESH_KEYPOINTS_BY_CONTOUR = {
   faceOval: connectionsToIndices(FACE_OVAL_CONNECTIONS),
 };

-const indexLabelPairs: Array<[number, string]> = Object.entries(MEDIAPIPE_FACE_MESH_KEYPOINTS_BY_CONTOUR)
+const indexLabelPairs: [number, string][] = Object.entries(MEDIAPIPE_FACE_MESH_KEYPOINTS_BY_CONTOUR)
   .map(([label, indices]) => indices.map((index) => [index, label] as [number, string]))
   .flat();

@@ -22,11 +22,10 @@ import type { Tensor } from '../tfjs/types';
 import type { Human } from '../human';
 import { calculateFaceAngle } from './angles';

-type DescRes = { age: number, gender: Gender, genderScore: number, descriptor: number[], race?: { score: number, race: Race }[] };
+interface DescRes { age: number, gender: Gender, genderScore: number, descriptor: number[], race?: { score: number, race: Race }[] }

 export const detectFace = async (instance: Human /* instance of human */, input: Tensor): Promise<FaceResult[]> => {
   // run facemesh, includes blazeface and iris
-  // eslint-disable-next-line no-async-promise-executor
   let timeStamp: number = now();
   let ageRes: { age: number } | Promise<{ age: number }> | null;
   let gearRes: gear.GearType | Promise<gear.GearType> | null;
@@ -38,7 +37,7 @@ export const detectFace = async (instance: Human /* instance of human */, input: Tensor): Promise<FaceResult[]> => {
   let livenessRes: number | Promise<number> | null;
   let descRes: DescRes | Promise<DescRes> | null;

-  const faceRes: Array<FaceResult> = [];
+  const faceRes: FaceResult[] = [];
   instance.state = 'run:face';

   const faces = await facemesh.predict(input, instance.config);
@@ -51,7 +50,7 @@ export const detectFace = async (instance: Human /* instance of human */, input: Tensor): Promise<FaceResult[]> => {

     // is something went wrong, skip the face
     // @ts-ignore possibly undefied
-    if (!faces[i].tensor || faces[i].tensor['isDisposedInternal']) {
+    if (!faces[i].tensor || faces[i].tensor.isDisposedInternal) {
       log('Face object is disposed:', faces[i].tensor);
       continue;
     }
@@ -60,7 +59,7 @@ export const detectFace = async (instance: Human /* instance of human */, input: Tensor): Promise<FaceResult[]> => {
     if (instance.config.face.detector?.mask) {
       const masked = await mask.mask(faces[i]);
       tf.dispose(faces[i].tensor);
-      faces[i].tensor = masked as Tensor;
+      if (masked) faces[i].tensor = masked;
     }

     // calculate face angles
@@ -105,11 +104,11 @@ export const detectFace = async (instance: Human /* instance of human */, input: Tensor): Promise<FaceResult[]> => {
     // run gear, inherits face from blazeface
     instance.analyze('Start GEAR:');
     if (instance.config.async) {
-      gearRes = instance.config.face['gear']?.enabled ? gear.predict(faces[i].tensor || tf.tensor([]), instance.config, i, faces.length) : null;
+      gearRes = instance.config.face.gear?.enabled ? gear.predict(faces[i].tensor || tf.tensor([]), instance.config, i, faces.length) : null;
     } else {
       instance.state = 'run:gear';
       timeStamp = now();
-      gearRes = instance.config.face['gear']?.enabled ? await gear.predict(faces[i].tensor || tf.tensor([]), instance.config, i, faces.length) : null;
+      gearRes = instance.config.face.gear?.enabled ? await gear.predict(faces[i].tensor || tf.tensor([]), instance.config, i, faces.length) : null;
       instance.performance.gear = Math.trunc(now() - timeStamp);
     }
     instance.analyze('End GEAR:');
@@ -178,7 +177,7 @@ export const detectFace = async (instance: Human /* instance of human */, input: Tensor): Promise<FaceResult[]> => {
         genderScore: (genderRes as { gender: Gender, genderScore: number }).genderScore,
       };
     }
-    if (instance.config.face['gear']?.enabled && gearRes) { // override age/gender/race if gear model is used
+    if (instance.config.face.gear?.enabled && gearRes) { // override age/gender/race if gear model is used
       descRes = {
         ...(descRes as DescRes),
         age: (gearRes as gear.GearType).age,
@@ -218,12 +217,12 @@ export const detectFace = async (instance: Human /* instance of human */, input: Tensor): Promise<FaceResult[]> => {
       ...faces[i],
       id: i,
     };
-    if ((descRes as DescRes)?.age) res.age = (descRes as DescRes).age as number;
-    if ((descRes as DescRes)?.gender) res.gender = (descRes as DescRes).gender as Gender;
-    if ((descRes as DescRes)?.genderScore) res.genderScore = (descRes as DescRes)?.genderScore as number;
-    if ((descRes as DescRes)?.descriptor) res.embedding = (descRes as DescRes)?.descriptor as Array<number>;
-    if ((descRes as DescRes)?.race) res.race = (descRes as DescRes)?.race as { score: number, race: Race }[];
-    if (emotionRes) res.emotion = emotionRes as Array<{ score: number, emotion: Emotion }>;
+    if ((descRes as DescRes).age) res.age = (descRes as DescRes).age;
+    if ((descRes as DescRes).gender) res.gender = (descRes as DescRes).gender;
+    if ((descRes as DescRes).genderScore) res.genderScore = (descRes as DescRes).genderScore;
+    if ((descRes as DescRes).descriptor) res.embedding = (descRes as DescRes).descriptor;
+    if ((descRes as DescRes).race) res.race = (descRes as DescRes).race as { score: number, race: Race }[];
+    if (emotionRes) res.emotion = emotionRes as { score: number, emotion: Emotion }[];
     if (antispoofRes) res.real = antispoofRes as number;
     if (livenessRes) res.live = livenessRes as number;
     if (irisSize && irisSize !== 0) res.iris = Math.trunc(500 / irisSize / 11.7) / 100;
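The last hunk is the same cleanup applied repeatedly: once the guard has run, the trailing `?.` and the `as number`/`as Gender` casts on the right-hand side were redundant - the property access can no longer fail and the cast merely restated the declared field type. Reduced to its core:

  interface Res { age?: number }
  const r: Res = { age: 30 };
  if (r.age) {
    const a: number = r.age; // narrowing inside the guard already yields number
    // the removed style was: const a = r?.age as number; - the ?. and cast add nothing here
  }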
@@ -23,7 +23,7 @@ export class FaceBoxes {

   async estimateFaces(input, config) {
     if (config) this.config = config;
-    const results: Array<{ confidence: number, box: Box, boxRaw: Box, image: Tensor }> = [];
+    const results: { confidence: number, box: Box, boxRaw: Box, image: Tensor }[] = [];
     const resizeT = tf.image.resizeBilinear(input, [this.inputSize, this.inputSize]);
     const castT = resizeT.toInt();
     const [scoresT, boxesT, numT] = await this.model.executeAsync(castT) as Tensor[];
@@ -37,7 +37,7 @@ export class FaceBoxes {
     castT.dispose();
     resizeT.dispose();
     for (const i in boxes) {
-      if (scores[i] && scores[i] > (this.config.face?.detector?.minConfidence || 0.1)) {
+      if (scores[i] && scores[i] > (this.config.face.detector?.minConfidence || 0.1)) {
         const crop = [boxes[i][0] / this.enlarge, boxes[i][1] / this.enlarge, boxes[i][2] * this.enlarge, boxes[i][3] * this.enlarge];
         const boxRaw: Box = [crop[1], crop[0], (crop[3]) - (crop[1]), (crop[2]) - (crop[0])];
         const box: Box = [
@@ -21,7 +21,7 @@ import type { GraphModel, Tensor } from '../tfjs/types';
 import type { FaceResult, FaceLandmark, Point } from '../result';
 import type { Config } from '../config';

-type DetectBox = { startPoint: Point, endPoint: Point, landmarks: Array<Point>, confidence: number };
+interface DetectBox { startPoint: Point, endPoint: Point, landmarks: Point[], confidence: number }

 const cache = {
   boxes: [] as DetectBox[],
@@ -43,9 +43,10 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult[]> {
   } else {
     cache.skipped++;
   }
-  const faces: Array<FaceResult> = [];
-  const newCache: Array<DetectBox> = [];
+  const faces: FaceResult[] = [];
+  const newCache: DetectBox[] = [];
   let id = 0;
+  const size = inputSize;
   for (let i = 0; i < cache.boxes.length; i++) {
     const box = cache.boxes[i];
     let angle = 0;
@@ -66,10 +67,10 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult[]> {

     // optional rotation correction based on detector data only if mesh is disabled otherwise perform it later when we have more accurate mesh data. if no rotation correction this function performs crop
     [angle, rotationMatrix, face.tensor] = util.correctFaceRotation(config.face.detector?.rotation, box, input, config.face.mesh?.enabled ? inputSize : blazeface.size());
-    if (config?.filter?.equalization) {
-      const equilized = await histogramEqualization(face.tensor as Tensor);
+    if (config.filter.equalization) {
+      const equilized = face.tensor ? await histogramEqualization(face.tensor) : undefined;
       tf.dispose(face.tensor);
-      face.tensor = equilized;
+      if (equilized) face.tensor = equilized;
     }
     face.boxScore = Math.round(100 * box.confidence) / 100;
     if (!config.face.mesh?.enabled) { // mesh not enabled, return resuts from detector only
@@ -80,7 +81,7 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult[]> {
         ((box.startPoint[0] + box.endPoint[0])) / 2 + ((box.endPoint[0] + box.startPoint[0]) * pt[0] / blazeface.size()),
         ((box.startPoint[1] + box.endPoint[1])) / 2 + ((box.endPoint[1] + box.startPoint[1]) * pt[1] / blazeface.size()),
       ]);
-      face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize]);
+      face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / size]);
       for (const key of Object.keys(coords.blazeFaceLandmarks)) {
         face.annotations[key] = [face.mesh[coords.blazeFaceLandmarks[key] as number]]; // add annotations
       }
@@ -91,14 +92,14 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult[]> {
        tf.dispose(face.tensor);
        return faces;
      }
-      const results = model.execute(face.tensor as Tensor) as Array<Tensor>;
+      const results = model.execute(face.tensor as Tensor) as Tensor[];
      const confidenceT = results.find((t) => t.shape[t.shape.length - 1] === 1) as Tensor;
      const faceConfidence = await confidenceT.data();
      face.faceScore = Math.round(100 * faceConfidence[0]) / 100;

      if (face.faceScore < (config.face.detector?.minConfidence || 1)) { // low confidence in detected mesh
        box.confidence = face.faceScore; // reset confidence of cached box
-        if (config.face.mesh?.keepInvalid) {
+        if (config.face.mesh.keepInvalid) {
          face.box = util.clampBox(box, input);
          face.boxRaw = util.getRawBox(box, input);
          face.score = face.boxScore;
@@ -106,7 +107,7 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult[]> {
            ((box.startPoint[0] + box.endPoint[0])) / 2 + ((box.endPoint[0] + box.startPoint[0]) * pt[0] / blazeface.size()),
            ((box.startPoint[1] + box.endPoint[1])) / 2 + ((box.endPoint[1] + box.startPoint[1]) * pt[1] / blazeface.size()),
          ]);
-          face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 1), pt[1] / (input.shape[1] || 1), (pt[2] || 0) / inputSize]);
+          face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 1), pt[1] / (input.shape[1] || 1), (pt[2] || 0) / size]);
          for (const key of Object.keys(coords.blazeFaceLandmarks)) {
            face.annotations[key] = [face.mesh[coords.blazeFaceLandmarks[key] as number]]; // add annotations
          }
@@ -122,7 +123,7 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult[]> {
          rawCoords = await iris.augmentIris(rawCoords, face.tensor, config, inputSize); // run iris model and augment results
        }
        face.mesh = util.transformRawCoords(rawCoords, box, angle, rotationMatrix, inputSize); // get processed mesh
-        face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize]);
+        face.meshRaw = face.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / size]);
        for (const key of Object.keys(coords.meshAnnotations)) face.annotations[key] = coords.meshAnnotations[key].map((index) => face.mesh[index]); // add annotations
        face.score = face.faceScore;
        const calculatedBox = { ...util.calculateFaceBox(face.mesh, box), confidence: box.confidence, landmarks: box.landmarks };
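Introducing `const size = inputSize` and using it in every `meshRaw` mapping keeps the three divisions symmetrical: x and y are divided by the input tensor's width and height (tensor shape is [batch, height, width, channels], hence shape[2] and shape[1]) and z by the model's input size, producing resolution-independent coordinates in roughly the 0..1 range. One point worked through with illustrative numbers:

  const toRaw = (pt: [number, number, number], width: number, height: number, size: number): [number, number, number] =>
    [pt[0] / width, pt[1] / height, pt[2] / size];
  toRaw([640, 360, 12], 1280, 720, 256); // [0.5, 0.5, 0.046875]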
@@ -148,13 +149,11 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult[]> {

 export async function load(config: Config): Promise<GraphModel> {
   if (env.initial) model = null;
-  // @ts-ignore private property
-  if (config?.face?.attention?.enabled && model?.signature) {
-    // @ts-ignore private property
-    if (Object.keys(model?.signature?.outputs || {}).length < 6) model = null;
+  if (config.face.attention?.enabled && model?.['signature']) {
+    if (Object.keys(model?.['signature']?.outputs || {}).length < 6) model = null;
   }
   if (!model) {
-    if (config.face.attention?.enabled) model = await loadModel(config.face.attention?.modelPath);
+    if (config.face.attention?.enabled) model = await loadModel(config.face.attention.modelPath);
     else model = await loadModel(config.face.mesh?.modelPath);
   } else if (config.debug) {
     log('cached model:', model['modelUrl']);
@ -53,7 +53,7 @@ export const meshAnnotations: Record<string, number[]> = {
|
||||||
export const meshLandmarks: Record<string, number | number[]> = {
|
export const meshLandmarks: Record<string, number | number[]> = {
|
||||||
count: 468,
|
count: 468,
|
||||||
mouth: 13,
|
mouth: 13,
|
||||||
symmetryLine: [13, meshAnnotations['midwayBetweenEyes'][0]],
|
symmetryLine: [13, meshAnnotations.midwayBetweenEyes[0]],
|
||||||
};
|
};
|
||||||
|
|
||||||
export const blazeFaceLandmarks: Record<string, number | number[]> = {
|
export const blazeFaceLandmarks: Record<string, number | number[]> = {
|
||||||
|
@ -66,7 +66,7 @@ export const blazeFaceLandmarks: Record<string, number | number[]> = {
|
||||||
symmetryLine: [3, 2],
|
symmetryLine: [3, 2],
|
||||||
};
|
};
|
||||||
|
|
||||||
export const irisIndices: Array<{ key: string, indices: number[] }> = [ // A mapping from facemesh model keypoints to iris model keypoints.
|
export const irisIndices: { key: string, indices: number[] }[] = [ // A mapping from facemesh model keypoints to iris model keypoints.
|
||||||
{ key: 'EyeUpper0', indices: [9, 10, 11, 12, 13, 14, 15] }, // 7 x 3d
|
{ key: 'EyeUpper0', indices: [9, 10, 11, 12, 13, 14, 15] }, // 7 x 3d
|
||||||
{ key: 'EyeUpper1', indices: [25, 26, 27, 28, 29, 30, 31] }, // 7 x 3d
|
{ key: 'EyeUpper1', indices: [25, 26, 27, 28, 29, 30, 31] }, // 7 x 3d
|
||||||
{ key: 'EyeUpper2', indices: [41, 42, 43, 44, 45, 46, 47] }, // 7 x 3d
|
{ key: 'EyeUpper2', indices: [41, 42, 43, 44, 45, 46, 47] }, // 7 x 3d
|
||||||
|
@ -549,7 +549,7 @@ export const UV468: [number, number][] = [
|
||||||
[0.723330020904541, 0.363372981548309],
|
[0.723330020904541, 0.363372981548309],
|
||||||
];
|
];
|
||||||
|
|
||||||
export const TRI468: Array<number> = [
|
export const TRI468: number[] = [
|
||||||
127, 34, 139, 11, 0, 37, 232, 231, 120, 72, 37, 39, 128, 121, 47, 232, 121, 128, 104, 69, 67, 175, 171, 148, 157, 154, 155, 118, 50, 101, 73, 39, 40, 9,
|
127, 34, 139, 11, 0, 37, 232, 231, 120, 72, 37, 39, 128, 121, 47, 232, 121, 128, 104, 69, 67, 175, 171, 148, 157, 154, 155, 118, 50, 101, 73, 39, 40, 9,
|
||||||
151, 108, 48, 115, 131, 194, 204, 211, 74, 40, 185, 80, 42, 183, 40, 92, 186, 230, 229, 118, 202, 212, 214, 83, 18, 17, 76, 61, 146, 160, 29, 30, 56,
|
151, 108, 48, 115, 131, 194, 204, 211, 74, 40, 185, 80, 42, 183, 40, 92, 186, 230, 229, 118, 202, 212, 214, 83, 18, 17, 76, 61, 146, 160, 29, 30, 56,
|
||||||
157, 173, 106, 204, 194, 135, 214, 192, 203, 165, 98, 21, 71, 68, 51, 45, 4, 144, 24, 23, 77, 146, 91, 205, 50, 187, 201, 200, 18, 91, 106, 182, 90, 91,
|
157, 173, 106, 204, 194, 135, 214, 192, 203, 165, 98, 21, 71, 68, 51, 45, 4, 144, 24, 23, 77, 146, 91, 205, 50, 187, 201, 200, 18, 91, 106, 182, 90, 91,
|
||||||
|
@ -635,7 +635,7 @@ export const TRI468: Array<number> = [
|
||||||
259, 443, 259, 260, 444, 260, 467, 445, 309, 459, 250, 305, 289, 290, 305, 290, 460, 401, 376, 435, 309, 250, 392, 376, 411, 433, 453, 341, 464, 357,
|
259, 443, 259, 260, 444, 260, 467, 445, 309, 459, 250, 305, 289, 290, 305, 290, 460, 401, 376, 435, 309, 250, 392, 376, 411, 433, 453, 341, 464, 357,
|
||||||
453, 465, 343, 357, 412, 437, 343, 399, 344, 360, 440, 420, 437, 456, 360, 420, 363, 361, 401, 288, 265, 372, 353, 390, 339, 249, 339, 448, 255];
|
453, 465, 343, 357, 412, 437, 343, 399, 344, 360, 440, 420, 437, 456, 360, 420, 363, 361, 401, 288, 265, 372, 353, 390, 339, 249, 339, 448, 255];
|
||||||
|
|
||||||
export const TRI68: Array<number> = [0, 1, 36, 0, 36, 17, 1, 2, 41, 1, 41, 36, 2, 3, 31, 2, 31, 41, 3, 4, 48, 3, 48, 31, 4, 5, 48, 5, 6, 48, 6, 7, 59, 6, 59, 48, 7, 8, 58, 7, 58, 59,
|
export const TRI68: number[] = [0, 1, 36, 0, 36, 17, 1, 2, 41, 1, 41, 36, 2, 3, 31, 2, 31, 41, 3, 4, 48, 3, 48, 31, 4, 5, 48, 5, 6, 48, 6, 7, 59, 6, 59, 48, 7, 8, 58, 7, 58, 59,
|
||||||
8, 9, 56, 8, 56, 57, 8, 57, 58, 9, 10, 55, 9, 55, 56, 10, 11, 54, 10, 54, 55, 11, 12, 54, 12, 13, 54, 13, 14, 35, 13, 35, 54, 14, 15, 46, 14, 46, 35, 15, 16,
|
8, 9, 56, 8, 56, 57, 8, 57, 58, 9, 10, 55, 9, 55, 56, 10, 11, 54, 10, 54, 55, 11, 12, 54, 12, 13, 54, 13, 14, 35, 13, 35, 54, 14, 15, 46, 14, 46, 35, 15, 16,
|
||||||
45, 15, 45, 46, 16, 26, 45, 17, 36, 18, 18, 37, 19, 18, 36, 37, 19, 38, 20, 19, 37, 38, 20, 39, 21, 20, 38, 39, 21, 39, 27, 22, 42, 23, 22, 27, 42, 23, 43, 24,
|
45, 15, 45, 46, 16, 26, 45, 17, 36, 18, 18, 37, 19, 18, 36, 37, 19, 38, 20, 19, 37, 38, 20, 39, 21, 20, 38, 39, 21, 39, 27, 22, 42, 23, 22, 27, 42, 23, 43, 24,
|
||||||
23, 42, 43, 24, 44, 25, 24, 43, 44, 25, 45, 26, 25, 44, 45, 27, 39, 28, 27, 28, 42, 28, 39, 29, 28, 29, 42, 29, 31, 30, 29, 30, 35, 29, 40, 31, 29, 35, 47, 29,
|
23, 42, 43, 24, 44, 25, 24, 43, 44, 25, 45, 26, 25, 44, 45, 27, 39, 28, 27, 28, 42, 28, 39, 29, 28, 29, 42, 29, 31, 30, 29, 30, 35, 29, 40, 31, 29, 35, 47, 29,
|
||||||
|
@@ -644,7 +644,7 @@ export const TRI68: Array<number> = [0, 1, 36, 0, 36, 17, 1, 2, 41, 1, 41, 36, 2
 48, 59, 60, 49, 61, 50, 49, 60, 61, 50, 62, 51, 50, 61, 62, 51, 62, 52, 52, 63, 53, 52, 62, 63, 53, 64, 54, 53, 63, 64, 54, 64, 55, 55, 65, 56, 55, 64, 65, 56,
 66, 57, 56, 65, 66, 57, 66, 58, 58, 67, 59, 58, 66, 67, 59, 67, 60, 60, 67, 61, 61, 66, 62, 61, 67, 66, 62, 66, 63, 63, 65, 64, 63, 66, 65, 21, 27, 22];

-export const TRI33: Array<number> = [
+export const TRI33: number[] = [
 /* eyes */ 0, 8, 7, 7, 8, 1, 2, 10, 9, 9, 10, 3,
 /* brows */ 17, 0, 18, 18, 0, 7, 18, 7, 19, 19, 7, 1, 19, 1, 11, 19, 11, 20, 21, 3, 22, 21, 9, 3, 20, 9, 21, 20, 2, 9, 20, 11, 2,
 /* 4head */ 23, 17, 18, 25, 21, 22, 24, 19, 20, 24, 18, 19, 24, 20, 21, 24, 23, 18, 24, 21, 25,
@@ -655,9 +655,9 @@ export const TRI33: Array<number> = [
 /* cont */ 26, 30, 5, 27, 6, 31, 0, 28, 26, 3, 27, 29, 17, 28, 0, 3, 29, 22, 23, 28, 17, 22, 29, 25, 28, 30, 26, 27, 31, 29,
 ];

-export const TRI7: Array<number> = [0, 4, 1, 2, 4, 3, 4, 5, 6];
+export const TRI7: number[] = [0, 4, 1, 2, 4, 3, 4, 5, 6];

-export const VTX68: Array<number> = [
+export const VTX68: number[] = [
 /* cont */ 127, 234, 132, 58, 172, 150, 149, 148, 152, 377, 378, 379, 397, 288, 361, 454, 356,
 /* brows */ 70, 63, 105, 66, 107, 336, 296, 334, 293, 300,
 /* nose */ 168, 6, 195, 4, 98, 97, 2, 326, 327,

@@ -666,9 +666,9 @@ export const VTX68: Array<number> = [
 /* mouth */ 78, 81, 13, 311, 308, 402, 14, 178,
 ];

-export const VTX33: Array<number> = [33, 133, 362, 263, 1, 62, 308, 159, 145, 386, 374, 6, 102, 331, 2, 13, 14, 70, 105, 107, 336, 334, 300, 54, 10, 284, 50, 280, 234, 454, 58, 288, 152];
+export const VTX33: number[] = [33, 133, 362, 263, 1, 62, 308, 159, 145, 386, 374, 6, 102, 331, 2, 13, 14, 70, 105, 107, 336, 334, 300, 54, 10, 284, 50, 280, 234, 454, 58, 288, 152];

-export const VTX7: Array<number> = [33, 133, 362, 263, 1, 78, 308];
+export const VTX7: number[] = [33, 133, 362, 263, 1, 78, 308];

 export const UV68 = VTX68.map((x) => UV468[x]);

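Aside: the pattern in the hunks above is worth spelling out. The reduced landmark tables (TRI68/TRI33/TRI7, VTX68/VTX33/VTX7) are index lists into the full 468-point mesh, and derived tables such as UV68 are produced by plain index lookup. A minimal sketch of the technique with made-up miniature data (the real UV468 table lives earlier in this file):

// hypothetical miniature data; stand-ins for the real UV468 and VTX68 tables
type UV = [number, number];
const uvFull: UV[] = [[0.1, 0.2], [0.3, 0.4], [0.5, 0.6], [0.7, 0.8]]; // stand-in for UV468
const vtxSubset = [2, 0]; // stand-in for VTX68
const uvSubset = vtxSubset.map((x) => uvFull[x]); // same shape as UV68 = VTX68.map((x) => UV468[x])
console.log(uvSubset); // [[0.5, 0.6], [0.1, 0.2]]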
@@ -679,7 +679,7 @@ export const UV7 = VTX7.map((x) => UV468[x]);
 // https://github.com/tensorflow/tfjs-models/blob/master/face-landmarks-detection/src/constants.ts
 // https://github.com/google/mediapipe/mediapipe/python/solutions/face_mesh_connections.py

-type PairArray = Array<[number, number]>;
+type PairArray = [number, number][];

 function connectionsToIndices(connections: PairArray) {
 const indices = connections.map((connection) => connection[0]);
@@ -84,13 +84,13 @@ export const dot = (v1: number[], v2: number[]) => {
 };

 export const getColumnFrom2DArr = (arr, columnIndex) => {
-const column: Array<number> = [];
+const column: number[] = [];
 for (let i = 0; i < arr.length; i++) column.push(arr[i][columnIndex]);
 return column;
 };

 export const multiplyTransformMatrices = (mat1, mat2) => {
-const product: Array<number[]> = [];
+const product: number[][] = [];
 const size = mat1.length;
 for (let row = 0; row < size; row++) {
 product.push([]);
@@ -124,7 +124,7 @@ export function generateAnchors(inputSize) {
 const spec = inputSize === 192
 ? { strides: [4], anchors: [1] } // facemesh-detector
 : { strides: [inputSize / 16, inputSize / 8], anchors: [2, 6] }; // blazeface
-const anchors: Array<[number, number]> = [];
+const anchors: [number, number][] = [];
 for (let i = 0; i < spec.strides.length; i++) {
 const stride = spec.strides[i];
 const gridRows = Math.floor((inputSize + stride - 1) / stride);
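The generateAnchors hunk only shows the spec and the start of the stride loop; a hedged sketch of how an SSD-style anchor grid of this shape is typically filled in — the inner rows/cols loop is an assumption, not the verbatim continuation of the source:

// sketch: one anchor center per grid cell per anchor count; square input assumed
function generateAnchorsSketch(inputSize: number): [number, number][] {
  const spec = inputSize === 192
    ? { strides: [4], anchors: [1] } // facemesh-detector
    : { strides: [inputSize / 16, inputSize / 8], anchors: [2, 6] }; // blazeface
  const anchors: [number, number][] = [];
  for (let i = 0; i < spec.strides.length; i++) {
    const stride = spec.strides[i];
    const gridSize = Math.floor((inputSize + stride - 1) / stride); // same ceil-division as gridRows above
    for (let row = 0; row < gridSize; row++) {
      for (let col = 0; col < gridSize; col++) {
        for (let a = 0; a < spec.anchors[i]; a++) anchors.push([stride * (col + 0.5), stride * (row + 0.5)]);
      }
    }
  }
  return anchors;
}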
@@ -16,15 +16,15 @@ import type { Tensor, GraphModel } from '../tfjs/types';
 import type { Config } from '../config';
 import type { Gender, Race } from '../result';

-export type FaceRes = { age: number, gender: Gender, genderScore: number, descriptor: number[], race?: { score: number, race: Race }[] };
+export interface FaceRes { age: number, gender: Gender, genderScore: number, descriptor: number[], race?: { score: number, race: Race }[] }

 let model: GraphModel | null;
-const last: Array<{
+const last: {
 age: number,
 gender: Gender,
 genderScore: number,
 descriptor: number[],
-}> = [];
+}[] = [];

 let lastTime = 0;
 let lastCount = 0;
@@ -88,7 +88,7 @@ export async function predict(image: Tensor, config: Config, idx: number, count:
 const genderT = await resT.find((t) => t.shape[1] === 1) as Tensor;
 const gender = await genderT.data();
 const confidence = Math.trunc(200 * Math.abs((gender[0] - 0.5))) / 100;
-if (confidence > (config.face.description?.minConfidence || 0)) {
+if (confidence > (config.face.description.minConfidence || 0)) {
 obj.gender = gender[0] <= 0.5 ? 'female' : 'male';
 obj.genderScore = Math.min(0.99, confidence);
 }
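The confidence expression in the hunk above maps a single sigmoid output in 0..1 onto a 0..1 confidence by measuring distance from the 0.5 decision boundary and truncating to two decimals. A worked example with an illustrative value:

const p = 0.08; // raw sigmoid output; close to 0 means strongly 'female' here
const confidence = Math.trunc(200 * Math.abs(p - 0.5)) / 100; // |0.08 - 0.5| = 0.42 → 0.84
const gender = p <= 0.5 ? 'female' : 'male';
console.log(gender, confidence); // female 0.84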
@@ -14,7 +14,7 @@ import type { Config } from '../config';
 import { env } from '../util/env';

 let model: GraphModel | null;
-const last: Array<number[]> = [];
+const last: number[][] = [];
 let lastCount = 0;
 let lastTime = 0;
 let skipped = Number.MAX_SAFE_INTEGER;
@@ -35,14 +35,14 @@ export async function predict(input: Tensor, config: Config, idx, count): Promis
 return last[idx];
 }
 return new Promise(async (resolve) => {
-let data: Array<number> = [];
+let data: number[] = [];
 if (config.face['insightface']?.enabled && model?.inputs[0].shape) {
 const t: Record<string, Tensor> = {};
 t.crop = tf.image.resizeBilinear(input, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false); // just resize to fit the embedding model
 // do a tight crop of image and resize it to fit the model
 // const box = [[0.05, 0.15, 0.85, 0.85]]; // empyrical values for top, left, bottom, right
 // t.crop = tf.image.cropAndResize(input, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
-t.data = model?.execute(t.crop) as Tensor;
+t.data = model.execute(t.crop) as Tensor;
 const output = await t.data.data();
 data = Array.from(output); // convert typed array to simple array
 Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
@@ -13,8 +13,8 @@ let inputSize = 0;

 const irisEnlarge = 2.3;

-const leftOutline = coords.meshAnnotations['leftEyeLower0'];
-const rightOutline = coords.meshAnnotations['rightEyeLower0'];
+const leftOutline = coords.meshAnnotations.leftEyeLower0;
+const rightOutline = coords.meshAnnotations.rightEyeLower0;

 const eyeLandmarks = {
 leftBounds: [leftOutline[0], leftOutline[leftOutline.length - 1]],
@@ -80,7 +80,7 @@ export const getEyeBox = (rawCoords, face, eyeInnerCornerIndex, eyeOuterCornerIn

 // Given a cropped image of an eye, returns the coordinates of the contours surrounding the eye and the iris.
 export const getEyeCoords = (eyeData, eyeBox, eyeBoxSize, flip = false) => {
-const eyeRawCoords: Array<Point> = [];
+const eyeRawCoords: Point[] = [];
 for (let i = 0; i < irisLandmarks.numCoordinates; i++) {
 const x = eyeData[i * 3];
 const y = eyeData[i * 3 + 1];
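getEyeCoords above walks a flat model output in strides of three values per landmark (x, y, z). A standalone sketch of that decoding pattern with a hypothetical buffer:

type Pt = [number, number, number];
const eyeData = [10, 20, 0.5, 30, 40, 0.6]; // flat [x0, y0, z0, x1, y1, z1, ...] buffer (made-up values)
const pts: Pt[] = [];
for (let i = 0; i < eyeData.length / 3; i++) {
  pts.push([eyeData[i * 3], eyeData[i * 3 + 1], eyeData[i * 3 + 2]]); // one landmark per stride of 3
}
console.log(pts); // [[10, 20, 0.5], [30, 40, 0.6]]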
@@ -10,7 +10,7 @@ import * as tf from '../../dist/tfjs.esm.js';
 import { env } from '../util/env';

 let model: GraphModel | null;
-const cached: Array<number> = [];
+const cached: number[] = [];
 let skipped = Number.MAX_SAFE_INTEGER;
 let lastCount = 0;
 let lastTime = 0;
@@ -7,7 +7,7 @@ const expandFact = 0.1;
 const alpha = 0.5;

 // point inclusion in polygon based on https://wrf.ecse.rpi.edu/Research/Short_Notes/pnpoly.html
-function insidePoly(x: number, y: number, polygon: Array<{ x: number, y: number }>): boolean {
+function insidePoly(x: number, y: number, polygon: { x: number, y: number }[]): boolean {
 let inside = false;
 let j = polygon.length - 1;
 for (let i = 0; i < polygon.length; j = i++) {
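For reference, the pnpoly ray-crossing test that insidePoly implements can be written out in full as below — this is the standard published algorithm from the linked page, not the verbatim continuation of the hunk:

// count polygon edges crossed by a horizontal ray from (x, y); odd count means inside
function insidePolySketch(x: number, y: number, polygon: { x: number, y: number }[]): boolean {
  let inside = false;
  let j = polygon.length - 1;
  for (let i = 0; i < polygon.length; j = i++) {
    const crosses = (polygon[i].y > y) !== (polygon[j].y > y) // edge spans the ray vertically
      && x < ((polygon[j].x - polygon[i].x) * (y - polygon[i].y)) / (polygon[j].y - polygon[i].y) + polygon[i].x; // crossing point is right of x
    if (crosses) inside = !inside;
  }
  return inside;
}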
@@ -22,7 +22,7 @@ export async function mask(face: FaceResult): Promise<Tensor | undefined> {
 const width = face.tensor.shape[2] || 0;
 const height = face.tensor.shape[1] || 0;
 const buffer = await face.tensor.buffer();
-let silhouette: Array<{ x: number, y: number }> = [];
+let silhouette: { x: number, y: number }[] = [];
 for (const pt of meshAnnotations.silhouette) silhouette.push({ x: (face.mesh[pt][0] - face.box[0]) / face.box[2], y: (face.mesh[pt][1] - face.box[1]) / face.box[3] }); // add all silhouette points scaled to local box
 if (expandFact && expandFact > 0) silhouette = silhouette.map((pt) => ({ x: pt.x > 0.5 ? pt.x + expandFact : pt.x - expandFact, y: pt.y > 0.5 ? pt.y + expandFact : pt.y - expandFact })); // expand silhouette
 for (let x = 0; x < width; x++) {
@@ -1,5 +1,5 @@
 /** Face descriptor type as number array */
-export type Descriptor = Array<number>
+export type Descriptor = number[]
 export type MatchOptions = { order?: number, threshold?: number, multiplier?: number, min?: number, max?: number } | undefined;

 /** Calculates distance between two descriptors

@@ -54,7 +54,7 @@ export function similarity(descriptor1: Descriptor, descriptor2: Descriptor, opt
 * - `distance` calculated `distance` of given descriptor to the best match
 * - `similarity` calculated normalized `similarity` of given descriptor to the best match
 */
-export function match(descriptor: Descriptor, descriptors: Array<Descriptor>, options: MatchOptions = { order: 2, multiplier: 25, threshold: 0, min: 0.2, max: 0.8 }) {
+export function match(descriptor: Descriptor, descriptors: Descriptor[], options: MatchOptions = { order: 2, multiplier: 25, threshold: 0, min: 0.2, max: 0.8 }) {
 if (!Array.isArray(descriptor) || !Array.isArray(descriptors) || descriptor.length < 64 || descriptors.length === 0) { // validate input
 return { index: -1, distance: Number.POSITIVE_INFINITY, similarity: 0 };
 }
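match above scans stored descriptors for the smallest distance; with the default options that is an order-2 (squared-difference) comparison. A hedged sketch of the core loop, assuming the distance is a plain Minkowski sum — the real distance and similarity helpers are defined earlier in this file and may normalize differently:

type Desc = number[];
// assumed distance: sum of |d1 - d2|^order, matching the default order: 2 option
function distanceSketch(d1: Desc, d2: Desc, order = 2): number {
  let sum = 0;
  for (let i = 0; i < Math.min(d1.length, d2.length); i++) sum += Math.abs(d1[i] - d2[i]) ** order;
  return sum;
}
function matchSketch(descriptor: Desc, descriptors: Desc[]) {
  let best = { index: -1, distance: Number.POSITIVE_INFINITY };
  for (let i = 0; i < descriptors.length; i++) {
    const d = distanceSketch(descriptor, descriptors[i]);
    if (d < best.distance) best = { index: i, distance: d };
  }
  return best; // index of the closest stored descriptor
}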
@@ -14,14 +14,14 @@ import type { Config } from '../config';
 import { env } from '../util/env';

 let model: GraphModel | null;
-const last: Array<number[]> = [];
+const last: number[][] = [];
 let lastCount = 0;
 let lastTime = 0;
 let skipped = Number.MAX_SAFE_INTEGER;

 export async function load(config: Config): Promise<GraphModel> {
 if (env.initial) model = null;
-if (!model) model = await loadModel(config.face['mobilefacenet'].modelPath);
+if (!model) model = await loadModel(config.face['mobilefacenet']?.modelPath);
 else if (config.debug) log('cached model:', model['modelUrl']);
 return model;
 }
@@ -53,14 +53,14 @@ export async function predict(input: Tensor, config: Config, idx, count): Promis
 return last[idx];
 }
 return new Promise(async (resolve) => {
-let data: Array<number> = [];
+let data: number[] = [];
 if (config.face['mobilefacenet']?.enabled && model?.inputs[0].shape) {
 const t: Record<string, Tensor> = {};
 t.crop = tf.image.resizeBilinear(input, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false); // just resize to fit the embedding model
 // do a tight crop of image and resize it to fit the model
 // const box = [[0.05, 0.15, 0.85, 0.85]]; // empyrical values for top, left, bottom, right
 // t.crop = tf.image.cropAndResize(input, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
-t.data = model?.execute(t.crop) as Tensor;
+t.data = model.execute(t.crop) as Tensor;
 /*
 // optional normalize outputs with l2 normalization
 const scaled = tf.tidy(() => {
@@ -15,7 +15,7 @@ import { constants } from '../tfjs/constants';

 const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral'];
 let model: GraphModel | null;
-const last: Array<Array<{ score: number, emotion: Emotion }>> = [];
+const last: { score: number, emotion: Emotion }[][] = [];
 let lastCount = 0;
 let lastTime = 0;
 let skipped = Number.MAX_SAFE_INTEGER;
@@ -27,7 +27,7 @@ export async function load(config: Config): Promise<GraphModel> {
 return model;
 }

-export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<Array<{ score: number, emotion: Emotion }>> {
+export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<{ score: number, emotion: Emotion }[]> {
 if (!model) return [];
 const skipFrame = skipped < (config.face.emotion?.skipFrames || 0);
 const skipTime = (config.face.emotion?.skipTime || 0) > (now() - lastTime);
@@ -37,7 +37,7 @@ export async function predict(image: Tensor, config: Config, idx: number, count:
 }
 skipped = 0;
 return new Promise(async (resolve) => {
-const obj: Array<{ score: number, emotion: Emotion }> = [];
+const obj: { score: number, emotion: Emotion }[] = [];
 if (config.face.emotion?.enabled) {
 const t: Record<string, Tensor> = {};
 const inputSize = model?.inputs[0].shape ? model.inputs[0].shape[2] : 0;
@@ -58,7 +58,7 @@ export async function predict(image: Tensor, config: Config, idx: number, count:
 lastTime = now();
 const data = await t.emotion.data();
 for (let i = 0; i < data.length; i++) {
-if (data[i] > (config.face.emotion?.minConfidence || 0)) obj.push({ score: Math.min(0.99, Math.trunc(100 * data[i]) / 100), emotion: annotations[i] as Emotion });
+if (data[i] > (config.face.emotion.minConfidence || 0)) obj.push({ score: Math.min(0.99, Math.trunc(100 * data[i]) / 100), emotion: annotations[i] as Emotion });
 }
 obj.sort((a, b) => b.score - a.score);
 Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
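The decode step in the hunk above is a reusable pattern: threshold a raw score vector, label each surviving entry, and sort best-first. A standalone sketch:

const labels = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral'];
function decodeScores(data: ArrayLike<number>, minConfidence = 0.1) {
  const obj: { score: number, emotion: string }[] = [];
  for (let i = 0; i < data.length; i++) {
    if (data[i] > minConfidence) obj.push({ score: Math.min(0.99, Math.trunc(100 * data[i]) / 100), emotion: labels[i] });
  }
  return obj.sort((a, b) => b.score - a.score); // strongest emotion first
}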
@@ -12,28 +12,26 @@ import type { Config } from '../config';
 import type { GraphModel, Tensor } from '../tfjs/types';
 import { env } from '../util/env';

-export type GearType = { age: number, gender: Gender, genderScore: number, race: Array<{ score: number, race: Race }> }
+export interface GearType { age: number, gender: Gender, genderScore: number, race: { score: number, race: Race }[] }
 let model: GraphModel | null;
-const last: Array<GearType> = [];
+const last: GearType[] = [];
 const raceNames = ['white', 'black', 'asian', 'indian', 'other'];
 const ageWeights = [15, 23, 28, 35.5, 45.5, 55.5, 65];
 let lastCount = 0;
 let lastTime = 0;
 let skipped = Number.MAX_SAFE_INTEGER;

-// eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function load(config: Config) {
 if (env.initial) model = null;
-if (!model) model = await loadModel(config.face['gear']?.modelPath);
+if (!model) model = await loadModel(config.face.gear?.modelPath);
 else if (config.debug) log('cached model:', model['modelUrl']);
 return model;
 }

-// eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<GearType> {
 if (!model) return { age: 0, gender: 'unknown', genderScore: 0, race: [] };
-const skipFrame = skipped < (config.face['gear']?.skipFrames || 0);
-const skipTime = (config.face['gear']?.skipTime || 0) > (now() - lastTime);
+const skipFrame = skipped < (config.face.gear?.skipFrames || 0);
+const skipTime = (config.face.gear?.skipTime || 0) > (now() - lastTime);
 if (config.skipAllowed && skipTime && skipFrame && (lastCount === count) && last[idx]) {
 skipped++;
 return last[idx];
@@ -46,13 +44,13 @@ export async function predict(image: Tensor, config: Config, idx: number, count:
 const box = [[0.0, 0.10, 0.90, 0.90]]; // empyrical values for top, left, bottom, right
 t.resize = tf.image.cropAndResize(image, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
 const obj: GearType = { age: 0, gender: 'unknown', genderScore: 0, race: [] };
-if (config.face['gear']?.enabled) [t.age, t.gender, t.race] = model.execute(t.resize, ['age_output', 'gender_output', 'race_output']) as Tensor[];
+if (config.face.gear?.enabled) [t.age, t.gender, t.race] = model.execute(t.resize, ['age_output', 'gender_output', 'race_output']) as Tensor[];
 const gender = await t.gender.data();
 obj.gender = gender[0] > gender[1] ? 'male' : 'female';
 obj.genderScore = Math.round(100 * (gender[0] > gender[1] ? gender[0] : gender[1])) / 100;
 const race = await t.race.data();
 for (let i = 0; i < race.length; i++) {
-if (race[i] > (config.face['gear']?.minConfidence || 0.2)) obj.race.push({ score: Math.round(100 * race[i]) / 100, race: raceNames[i] as Race });
+if (race[i] > (config.face.gear?.minConfidence || 0.2)) obj.race.push({ score: Math.round(100 * race[i]) / 100, race: raceNames[i] as Race });
 }
 obj.race.sort((a, b) => b.score - a.score);
 // {0: 'Below20', 1: '21-25', 2: '26-30', 3: '31-40',4: '41-50', 5: '51-60', 6: 'Above60'}
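ageWeights above pairs with the bucket comment at the end of this hunk ({0: 'Below20' ... 6: 'Above60'}): the age head emits one probability per bucket, and a scalar age can be decoded as the probability-weighted sum of bucket midpoints. A sketch of that expected-value decode — an assumption about how ageWeights is consumed, since the consuming line falls outside the hunk:

const ageWeights = [15, 23, 28, 35.5, 45.5, 55.5, 65]; // midpoints of the seven age buckets
function decodeAge(probs: ArrayLike<number>): number {
  let age = 0;
  for (let i = 0; i < ageWeights.length; i++) age += probs[i] * ageWeights[i]; // expected value over buckets
  return Math.trunc(10 * age) / 10;
}
// decodeAge([0, 0, 0.5, 0.5, 0, 0, 0]) === 31.7 (0.5 * 28 + 0.5 * 35.5 = 31.75, truncated)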
@@ -13,12 +13,11 @@ import type { Config } from '../config';
 import type { GraphModel, Tensor } from '../tfjs/types';

 let model: GraphModel | null;
-const last: Array<{ age: number }> = [];
+const last: { age: number }[] = [];
 let lastCount = 0;
 let lastTime = 0;
 let skipped = Number.MAX_SAFE_INTEGER;

-// eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function load(config: Config) {
 if (env.initial) model = null;
 if (!model) model = await loadModel(config.face['ssrnet'].modelPathAge);
@@ -26,7 +25,6 @@ export async function load(config: Config) {
 return model;
 }

-// eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<{ age: number }> {
 if (!model) return { age: 0 };
 const skipFrame = skipped < (config.face['ssrnet']?.skipFrames || 0);
@@ -42,7 +40,7 @@ export async function predict(image: Tensor, config: Config, idx: number, count:
 t.resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
 t.enhance = tf.mul(t.resize, constants.tf255);
 const obj = { age: 0 };
-if (config.face['ssrnet'].enabled) t.age = model.execute(t.enhance) as Tensor;
+if (config.face['ssrnet']?.enabled) t.age = model.execute(t.enhance) as Tensor;
 if (t.age) {
 const data = await t.age.data();
 obj.age = Math.trunc(10 * data[0]) / 10;
@@ -14,7 +14,7 @@ import type { GraphModel, Tensor } from '../tfjs/types';
 import { env } from '../util/env';

 let model: GraphModel | null;
-const last: Array<{ gender: Gender, genderScore: number }> = [];
+const last: { gender: Gender, genderScore: number }[] = [];
 let lastCount = 0;
 let lastTime = 0;
 let skipped = Number.MAX_SAFE_INTEGER;
@@ -22,15 +22,13 @@ let skipped = Number.MAX_SAFE_INTEGER;

 // tuning values
 const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale

-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-export async function load(config: Config | any) {
+export async function load(config: Config) {
 if (env.initial) model = null;
-if (!model) model = await loadModel(config.face['ssrnet'].modelPathGender);
+if (!model) model = await loadModel(config.face['ssrnet']?.modelPathGender);
 else if (config.debug) log('cached model:', model['modelUrl']);
 return model;
 }

-// eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function predict(image: Tensor, config: Config, idx, count): Promise<{ gender: Gender, genderScore: number }> {
 if (!model) return { gender: 'unknown', genderScore: 0 };
 const skipFrame = skipped < (config.face['ssrnet']?.skipFrames || 0);
@@ -54,7 +52,7 @@ export async function predict(image: Tensor, config: Config, idx, count): Promis
 return normalize;
 });
 const obj: { gender: Gender, genderScore: number } = { gender: 'unknown', genderScore: 0 };
-if (config.face['ssrnet'].enabled) t.gender = model.execute(t.enhance) as Tensor;
+if (config.face['ssrnet']?.enabled) t.gender = model.execute(t.enhance) as Tensor;
 const data = await t.gender.data();
 obj.gender = data[0] > data[1] ? 'female' : 'male'; // returns two values 0..1, bigger one is prediction
 obj.genderScore = data[0] > data[1] ? (Math.trunc(100 * data[0]) / 100) : (Math.trunc(100 * data[1]) / 100);
@@ -33,7 +33,7 @@ export type HandGesture =

 export const body = (res: BodyResult[]): GestureResult[] => {
 if (!res) return [];
-const gestures: Array<{ body: number, gesture: BodyGesture }> = [];
+const gestures: { body: number, gesture: BodyGesture }[] = [];
 for (let i = 0; i < res.length; i++) {
 // raising hands
 const leftWrist = res[i].keypoints.find((a) => (a.part === 'leftWrist'));

@@ -55,7 +55,7 @@ export const body = (res: BodyResult[]): GestureResult[] => {

 export const face = (res: FaceResult[]): GestureResult[] => {
 if (!res) return [];
-const gestures: Array<{ face: number, gesture: FaceGesture }> = [];
+const gestures: { face: number, gesture: FaceGesture }[] = [];
 for (let i = 0; i < res.length; i++) {
 if (res[i].mesh && res[i].mesh.length > 450) {
 const zDiff = (res[i].mesh[33][2] || 0) - (res[i].mesh[263][2] || 0);
@@ -77,7 +77,7 @@ export const face = (res: FaceResult[]): GestureResult[] => {

 export const iris = (res: FaceResult[]): GestureResult[] => {
 if (!res) return [];
-const gestures: Array<{ iris: number, gesture: IrisGesture }> = [];
+const gestures: { iris: number, gesture: IrisGesture }[] = [];
 for (let i = 0; i < res.length; i++) {
 if (!res[i].annotations || !res[i].annotations.leftEyeIris || !res[i].annotations.leftEyeIris[0] || !res[i].annotations.rightEyeIris || !res[i].annotations.rightEyeIris[0]) continue;
 const sizeXLeft = res[i].annotations.leftEyeIris[3][0] - res[i].annotations.leftEyeIris[1][0];

@@ -118,11 +118,11 @@ export const iris = (res: FaceResult[]): GestureResult[] => {

 export const hand = (res: HandResult[]): GestureResult[] => {
 if (!res) return [];
-const gestures: Array<{ hand: number, gesture: HandGesture }> = [];
+const gestures: { hand: number, gesture: HandGesture }[] = [];
 for (let i = 0; i < res.length; i++) {
-const fingers: Array<{ name: string, position: Point }> = [];
-if (res[i]['annotations']) {
-for (const [finger, pos] of Object.entries(res[i]['annotations'])) {
+const fingers: { name: string, position: Point }[] = [];
+if (res[i].annotations) {
+for (const [finger, pos] of Object.entries(res[i].annotations)) {
 if (finger !== 'palmBase' && Array.isArray(pos) && pos[0]) fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
 }
 }

@@ -132,8 +132,8 @@ export const hand = (res: HandResult[]): GestureResult[] => {
 const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a));
 gestures.push({ hand: i, gesture: `${highest.name} up` as HandGesture });
 }
-if (res[i]['keypoints']) {
-const poses = fingerPose.match(res[i]['keypoints']);
+if (res[i].keypoints) {
+const poses = fingerPose.match(res[i].keypoints);
 for (const pose of poses) gestures.push({ hand: i, gesture: pose.name as HandGesture });
 }
 }
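The `highest` reduce in the hand hunk relies on image coordinates growing downward, so the fingertip with the smallest y is the visually topmost one. A standalone sketch of that selection with made-up fingertip positions:

const fingers: { name: string, position: [number, number] }[] = [
  { name: 'thumb', position: [50, 120] },
  { name: 'index', position: [80, 40] }, // smallest y → visually highest
  { name: 'middle', position: [95, 60] },
];
// y grows downward in image space, so min y wins
const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a));
console.log(`${highest.name} up`); // index up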
@@ -5,11 +5,8 @@

 import { Finger, FingerCurl, FingerDirection, FingerGesture } from './fingerdef';

-// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
 export const { thumb, index, middle, ring, pinky } = Finger;
-// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
 export const { none, half, full } = FingerCurl;
-// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
 export const { verticalUp, verticalDown, horizontalLeft, horizontalRight, diagonalUpRight, diagonalUpLeft, diagonalDownRight, diagonalDownLeft } = FingerDirection;

 // describe thumbs up gesture 👍
@@ -4,6 +4,8 @@
 * Based on: [**FingerPose***](https://github.com/andypotato/fingerpose)
 */

+/* eslint-disable camelcase */
+
 import { Finger, FingerCurl, FingerDirection } from './fingerdef';
 import Gestures from '../hand/fingergesture';
@@ -174,17 +176,17 @@ function calculateFingerDirection(startPoint, midPoint, endPoint, fingerSlopes)

 function estimate(landmarks) {
 // step 1: calculate slopes
-const slopesXY: Array<number[]> = [];
-const slopesYZ: Array<number[]> = [];
-const fingerCurls: Array<number> = [];
-const fingerDirections: Array<number> = [];
+const slopesXY: number[][] = [];
+const slopesYZ: number[][] = [];
+const fingerCurls: number[] = [];
+const fingerDirections: number[] = [];
 if (!landmarks) return { curls: fingerCurls, directions: fingerDirections };

 // step 1: calculate slopes
 for (const finger of Finger.all) {
 const points = Finger.getPoints(finger);
-const slopeAtXY: Array<number> = [];
-const slopeAtYZ: Array<number> = [];
+const slopeAtXY: number[] = [];
+const slopeAtYZ: number[] = [];
 for (const point of points) {
 const point1 = landmarks[point[0]];
 const point2 = landmarks[point[1]];
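estimate above collects per-segment slopes in the XY and YZ planes for every finger. A hedged sketch of computing one such slope between two 3D landmarks — the actual slope helper is defined earlier in this file and may differ in sign or range conventions:

// assumed helper: segment angle in degrees within the chosen plane
function slopeDegrees(p1: number[], p2: number[], plane: 'xy' | 'yz' = 'xy'): number {
  const [a1, b1] = plane === 'xy' ? [p1[0], p1[1]] : [p1[1], p1[2]];
  const [a2, b2] = plane === 'xy' ? [p2[0], p2[1]] : [p2[1], p2[2]];
  return (Math.atan2(b2 - b1, a2 - a1) * 180) / Math.PI;
}
console.log(slopeDegrees([0, 0, 0], [1, 1, 0], 'xy')); // 45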
@@ -230,7 +232,7 @@ export function analyze(keypoints) { // get estimations of curl / direction for
 }

 export function match(keypoints) { // compare gesture description to each known gesture
-const poses: Array<{ name: string, confidence: number }> = [];
+const poses: { name: string, confidence: number }[] = [];
 if (!keypoints || keypoints.length === 0) return poses;
 const estimatorRes = estimate(keypoints);
 for (const gesture of Gestures) {
@@ -30,7 +30,7 @@ let handPipeline: handpipeline.HandPipeline;
 export async function predict(input: Tensor, config: Config): Promise<HandResult[]> {
 const predictions = await handPipeline.estimateHands(input, config);
 if (!predictions) return [];
-const hands: Array<HandResult> = [];
+const hands: HandResult[] = [];
 for (let i = 0; i < predictions.length; i++) {
 const annotations = {};
 if (predictions[i].landmarks) {

@@ -38,7 +38,7 @@ export async function predict(input: Tensor, config: Config): Promise<HandResult
 annotations[key] = meshAnnotations[key].map((index) => predictions[i].landmarks[index]);
 }
 }
-const keypoints = predictions[i].landmarks as unknown as Array<Point>;
+const keypoints = predictions[i].landmarks as unknown as Point[];
 let box: Box = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0]; // maximums so conditionals work
 let boxRaw: Box = [0, 0, 0, 0];
 if (keypoints && keypoints.length > 0) { // if we have landmarks, calculate box based on landmarks
@@ -68,8 +68,8 @@ export class HandDetector {
 t.norm = this.normalizeBoxes(t.boxes);
 // box detection is flaky so we look for 3x boxes than we need results
 t.nms = await tf.image.nonMaxSuppressionAsync(t.norm, t.scores, 3 * config.hand.maxDetected, config.hand.iouThreshold, config.hand.minConfidence);
-const nms = await t.nms.array() as Array<number>;
-const hands: Array<{ startPoint: Point; endPoint: Point; palmLandmarks: Point[]; confidence: number }> = [];
+const nms = await t.nms.array() as number[];
+const hands: { startPoint: Point; endPoint: Point; palmLandmarks: Point[]; confidence: number }[] = [];
 for (const index of nms) {
 const p: Record<string, Tensor> = {};
 p.box = tf.slice(t.norm, [index, 0], [1, -1]);
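The detector above deliberately over-requests boxes (3x maxDetected) before non-max suppression because raw palm detections are flaky. A minimal usage sketch of tf.image.nonMaxSuppressionAsync with made-up tensors, assuming the standard @tensorflow/tfjs package rather than the bundled dist/tfjs.esm.js:

import * as tf from '@tensorflow/tfjs';
async function nmsDemo(): Promise<number[]> {
  const boxes = tf.tensor2d([[0.0, 0.0, 0.5, 0.5], [0.01, 0.01, 0.5, 0.5], [0.5, 0.5, 1.0, 1.0]]); // [y1, x1, y2, x2], normalized
  const scores = tf.tensor1d([0.9, 0.8, 0.7]);
  const maxDetected = 1;
  const nmsT = await tf.image.nonMaxSuppressionAsync(boxes, scores, 3 * maxDetected, 0.5, 0.5); // over-request 3x
  const nms = await nmsT.array() as number[]; // surviving indices; the overlapping second box is suppressed → [0, 2]
  tf.dispose([boxes, scores, nmsT]);
  return nms;
}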
@@ -23,7 +23,7 @@ export class HandPipeline {
 handDetector: detector.HandDetector;
 handPoseModel: GraphModel;
 inputSize: number;
-storedBoxes: Array<{ startPoint: Point; endPoint: Point; palmLandmarks: Point[]; confidence: number } | null>;
+storedBoxes: ({ startPoint: Point; endPoint: Point; palmLandmarks: Point[]; confidence: number } | null)[];
 skipped: number;
 detectedHands: number;

@@ -36,8 +36,7 @@ export class HandPipeline {
 this.detectedHands = 0;
 }

-// eslint-disable-next-line class-methods-use-this
-calculateLandmarksBoundingBox(landmarks) {
+calculateLandmarksBoundingBox(landmarks) { // eslint-disable-line class-methods-use-this
 const xs = landmarks.map((d) => d[0]);
 const ys = landmarks.map((d) => d[1]);
 const startPoint = [Math.min(...xs), Math.min(...ys)];

@@ -107,7 +106,7 @@ export class HandPipeline {
 // for (const possible of boxes) this.storedBoxes.push(possible);
 if (this.storedBoxes.length > 0) useFreshBox = true;
 }
-const hands: Array<{ landmarks: Point[], confidence: number, boxConfidence: number, fingerConfidence: number, box: { topLeft: Point, bottomRight: Point } }> = [];
+const hands: { landmarks: Point[], confidence: number, boxConfidence: number, fingerConfidence: number, box: { topLeft: Point, bottomRight: Point } }[] = [];

 // go through working set of boxes
 for (let i = 0; i < this.storedBoxes.length; i++) {

@@ -124,7 +123,7 @@ export class HandPipeline {
 const handImage = tf.div(croppedInput, constants.tf255);
 tf.dispose(croppedInput);
 tf.dispose(rotatedImage);
-const [confidenceT, keypoints] = this.handPoseModel.execute(handImage) as Array<Tensor>;
+const [confidenceT, keypoints] = this.handPoseModel.execute(handImage) as Tensor[];
 lastTime = now();
 tf.dispose(handImage);
 const confidence = (await confidenceT.data())[0];
@@ -87,7 +87,7 @@ export function dot(v1, v2) {
 }

 export function getColumnFrom2DArr(arr, columnIndex) {
-const column: Array<number> = [];
+const column: number[] = [];
 for (let i = 0; i < arr.length; i++) {
 column.push(arr[i][columnIndex]);
 }

@@ -95,7 +95,7 @@ export function getColumnFrom2DArr(arr, columnIndex) {
 }

 export function multiplyTransformMatrices(mat1, mat2) {
-const product: Array<number[]> = [];
+const product: number[][] = [];
 const size = mat1.length;
 for (let row = 0; row < size; row++) {
 product.push([]);
@@ -34,7 +34,7 @@ let skipped = Number.MAX_SAFE_INTEGER;
 let lastTime = 0;
 let outputSize: [number, number] = [0, 0];

-type HandDetectResult = {
+interface HandDetectResult {
 id: number,
 score: number,
 box: Box,

@@ -43,8 +43,8 @@ type HandDetectResult = {
 }

 const cache: {
-boxes: Array<HandDetectResult>,
-hands: Array<HandResult>;
+boxes: HandDetectResult[],
+hands: HandResult[];
 } = {
 boxes: [],
 hands: [],

@@ -112,7 +112,7 @@ async function detectHands(input: Tensor, config: Config): Promise<HandDetectRes
 [t.rawScores, t.rawBoxes] = await models[0].executeAsync(t.cast, modelOutputNodes) as Tensor[];
 t.boxes = tf.squeeze(t.rawBoxes, [0, 2]);
 t.scores = tf.squeeze(t.rawScores, [0]);
-const classScores: Array<Tensor> = tf.unstack(t.scores, 1); // unstack scores based on classes
+const classScores: Tensor[] = tf.unstack(t.scores, 1); // unstack scores based on classes
 tf.dispose(classScores[faceIndex]);
 classScores.splice(faceIndex, 1); // remove faces
 t.filtered = tf.stack(classScores, 1); // restack

@@ -182,7 +182,7 @@ async function detectFingers(input: Tensor, h: HandDetectResult, config: Config)
 }

 export async function predict(input: Tensor, config: Config): Promise<HandResult[]> {
-if (!models[0] || !models[1] || !models[0]?.inputs[0].shape || !models[1]?.inputs[0].shape) return []; // something is wrong with the model
+if (!models[0] || !models[1] || !models[0].inputs[0].shape || !models[1].inputs[0].shape) return []; // something is wrong with the model
 outputSize = [input.shape[2] || 0, input.shape[1] || 0];
 skipped++; // increment skip frames
 const skipTime = (config.hand.skipTime || 0) > (now() - lastTime);
28
src/human.ts
@ -130,7 +130,7 @@ export class Human {
|
||||||
? 'https://vladmandic.github.io/tfjs/dist/'
|
? 'https://vladmandic.github.io/tfjs/dist/'
|
||||||
: `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf.version_core}/dist/`;
|
: `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf.version_core}/dist/`;
|
||||||
*/
|
*/
|
||||||
const tfVersion = (tf.version?.tfjs || tf.version_core).replace(/-(.*)/, '');
|
const tfVersion = (tf.version.tfjs || tf.version_core).replace(/-(.*)/, '');
|
||||||
defaults.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tfVersion}/dist/`;
|
defaults.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tfVersion}/dist/`;
|
||||||
defaults.modelBasePath = env.browser ? '../models/' : 'file://models/';
|
defaults.modelBasePath = env.browser ? '../models/' : 'file://models/';
|
||||||
defaults.backend = env.browser ? 'humangl' : 'tensorflow';
|
defaults.backend = env.browser ? 'humangl' : 'tensorflow';
|
||||||
|
@ -152,7 +152,7 @@ export class Human {
|
||||||
this.models = new models.Models();
|
this.models = new models.Models();
|
||||||
// reexport draw methods
|
// reexport draw methods
|
||||||
this.draw = {
|
this.draw = {
|
||||||
options: draw.options as DrawOptions,
|
options: draw.options,
|
||||||
canvas: (input: AnyCanvas | HTMLImageElement | HTMLVideoElement, output: AnyCanvas) => draw.canvas(input, output),
|
canvas: (input: AnyCanvas | HTMLImageElement | HTMLVideoElement, output: AnyCanvas) => draw.canvas(input, output),
|
||||||
face: (output: AnyCanvas, result: FaceResult[], options?: Partial<DrawOptions>) => draw.face(output, result, options),
|
face: (output: AnyCanvas, result: FaceResult[], options?: Partial<DrawOptions>) => draw.face(output, result, options),
|
||||||
body: (output: AnyCanvas, result: BodyResult[], options?: Partial<DrawOptions>) => draw.body(output, result, options),
|
body: (output: AnyCanvas, result: BodyResult[], options?: Partial<DrawOptions>) => draw.body(output, result, options),
|
||||||
|
@ -164,7 +164,6 @@ export class Human {
|
||||||
};
|
};
|
||||||
this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [], error: null };
|
this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [], error: null };
|
||||||
// export access to image processing
|
// export access to image processing
|
||||||
// @ts-ignore eslint-typescript cannot correctly infer type in anonymous function
|
|
||||||
this.process = { tensor: null, canvas: null };
|
this.process = { tensor: null, canvas: null };
|
||||||
// export raw access to underlying models
|
// export raw access to underlying models
|
||||||
this.faceTriangulation = facemesh.triangulation;
|
this.faceTriangulation = facemesh.triangulation;
|
||||||
|
@ -225,7 +224,7 @@ export class Human {
|
||||||
public match = match.match;
|
public match = match.match;
|
||||||
|
|
||||||
/** Utility wrapper for performance.now() */
|
/** Utility wrapper for performance.now() */
|
||||||
now(): number {
|
now(): number { // eslint-disable-line class-methods-use-this
|
||||||
return now();
|
return now();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -258,8 +257,7 @@ export class Human {
|
||||||
* @param input - Tensor as provided in human.result.face[n].tensor
|
* @param input - Tensor as provided in human.result.face[n].tensor
|
||||||
* @returns Tensor
|
* @returns Tensor
|
||||||
*/
|
*/
|
||||||
// eslint-disable-next-line class-methods-use-this
|
enhance(input: Tensor): Tensor | null { // eslint-disable-line class-methods-use-this
|
||||||
enhance(input: Tensor): Tensor | null {
|
|
||||||
return faceres.enhance(input);
|
return faceres.enhance(input);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -303,7 +301,7 @@ export class Human {
|
||||||
if (this.env.browser) {
|
if (this.env.browser) {
|
||||||
if (this.config.debug) log('configuration:', this.config);
|
if (this.config.debug) log('configuration:', this.config);
|
||||||
if (this.config.debug) log('environment:', this.env);
|
if (this.config.debug) log('environment:', this.env);
|
||||||
if (this.config.debug) log('tf flags:', this.tf.ENV['flags']);
|
if (this.config.debug) log('tf flags:', this.tf.ENV.flags);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -313,17 +311,17 @@ export class Human {
|
||||||
|
|
||||||
const loaded = Object.values(this.models).filter((model) => model).length;
|
const loaded = Object.values(this.models).filter((model) => model).length;
|
||||||
if (loaded !== count) { // number of loaded models changed
|
if (loaded !== count) { // number of loaded models changed
|
||||||
await models.validate(this); // validate kernel ops used by model against current backend
|
models.validate(this); // validate kernel ops used by model against current backend
|
||||||
this.emit('load');
|
this.emit('load');
|
||||||
}
|
}
|
||||||
|
|
||||||
const current = Math.trunc(now() - timeStamp);
|
const current = Math.trunc(now() - timeStamp);
|
||||||
if (current > (this.performance.loadModels as number || 0)) this.performance.loadModels = this.env.perfadd ? (this.performance.loadModels || 0) + current : current;
|
if (current > (this.performance.loadModels || 0)) this.performance.loadModels = this.env.perfadd ? (this.performance.loadModels || 0) + current : current;
|
||||||
}
|
}
|
||||||
|
|
||||||
/** emit event */
|
/** emit event */
|
||||||
emit = (event: string) => {
|
emit = (event: string) => {
|
||||||
if (this.events && this.events.dispatchEvent) this.events?.dispatchEvent(new Event(event));
|
if (this.events && this.events.dispatchEvent) this.events.dispatchEvent(new Event(event));
|
||||||
};
|
};
|
||||||
|
|
||||||
/** Runs interpolation using last known result and returns smoothened result
|
/** Runs interpolation using last known result and returns smoothened result
|
||||||
|
@ -333,7 +331,7 @@ export class Human {
|
||||||
* @returns result - {@link Result}
|
* @returns result - {@link Result}
|
||||||
*/
|
*/
|
||||||
next(result: Result = this.result): Result {
|
next(result: Result = this.result): Result {
|
||||||
return interpolate.calc(result, this.config) as Result;
|
return interpolate.calc(result, this.config);
|
||||||
}
|
}
|
||||||
|
|
||||||
/** get model loading/loaded stats */
|
/** get model loading/loaded stats */
|
||||||
@@ -357,7 +355,7 @@ export class Human {
    * - result object will contain total exeuction time information for top-20 kernels
    * - actual detection object can be accessed via `human.result`
    */
-  async profile(input: Input, userConfig?: Partial<Config>): Promise<Array<{ kernel: string, time: number, perc: number }>> {
+  async profile(input: Input, userConfig?: Partial<Config>): Promise<{ kernel: string, time: number, perc: number }[]> {
     const profile = await this.tf.profile(() => this.detect(input, userConfig));
     const kernels: Record<string, number> = {};
     let total = 0;
@@ -366,7 +364,7 @@ export class Human {
       else kernels[kernel.name] = kernel.kernelTimeMs;
       total += kernel.kernelTimeMs;
     }
-    const kernelArr: Array<{ kernel: string, time: number, perc: number }> = [];
+    const kernelArr: { kernel: string, time: number, perc: number }[] = [];
     Object.entries(kernels).forEach((key) => kernelArr.push({ kernel: key[0], time: key[1] as unknown as number, perc: 0 })); // convert to array
     for (const kernel of kernelArr) {
       kernel.perc = Math.round(1000 * kernel.time / total) / 1000;
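
Note: the recurring `Array<T>` to `T[]` rewrites across this commit follow `@typescript-eslint/array-type` (default option "array"). The two spellings produce the identical type; only the surface syntax changes:

    const a: Array<{ kernel: string, time: number, perc: number }> = [];
    const b: { kernel: string, time: number, perc: number }[] = [];
    a.push(...b); // assignable in both directions, since the types are the same
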
@@ -433,7 +431,7 @@ export class Human {
     this.config.skipAllowed = await image.skip(this.config, img.tensor);
     if (!this.performance.totalFrames) this.performance.totalFrames = 0;
     if (!this.performance.cachedFrames) this.performance.cachedFrames = 0;
-    (this.performance.totalFrames as number)++;
+    (this.performance.totalFrames)++;
     if (this.config.skipAllowed) this.performance.cachedFrames++;
     this.performance.cacheCheck = this.env.perfadd ? (this.performance.cacheCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
     this.analyze('Check Changed:');
@@ -524,7 +522,7 @@ export class Human {
     }

     this.performance.total = this.env.perfadd ? (this.performance.total || 0) + Math.trunc(now() - timeStart) : Math.trunc(now() - timeStart);
-    const shape = this.process?.tensor?.shape || [];
+    const shape = this.process.tensor?.shape || [];
     this.result = {
       face: faceRes as FaceResult[],
       body: bodyRes as BodyResult[],

@@ -81,7 +81,7 @@ export async function process(input: Input, config: Config, getTensor: boolean =
   if (input instanceof tf.Tensor) { // if input is tensor use as-is without filters but correct shape as needed
     let tensor: Tensor | null = null;
     if ((input as Tensor)['isDisposedInternal']) throw new Error('input error: attempted to use tensor but it is disposed');
-    if (!(input as Tensor)['shape']) throw new Error('input error: attempted to use tensor without a shape');
+    if (!(input as Tensor).shape) throw new Error('input error: attempted to use tensor without a shape');
     if ((input as Tensor).shape.length === 3) { // [height, width, 3 || 4]
       if ((input as Tensor).shape[2] === 3) { // [height, width, 3] so add batch
         tensor = tf.expandDims(input, 0);
@@ -98,140 +98,139 @@ export async function process(input: Input, config: Config, getTensor: boolean =
       }
     }
     // at the end shape must be [1, height, width, 3]
-    if (tensor == null || tensor.shape.length !== 4 || tensor.shape[0] !== 1 || tensor.shape[3] !== 3) throw new Error(`input error: attempted to use tensor with unrecognized shape: ${input['shape']}`);
-    if ((tensor as Tensor).dtype === 'int32') {
+    if (tensor == null || (tensor as Tensor).shape.length !== 4 || (tensor as Tensor).shape[0] !== 1 || (tensor as Tensor).shape[3] !== 3) throw new Error(`input error: attempted to use tensor with unrecognized shape: ${(input as Tensor).shape}`);
+    if ((tensor).dtype === 'int32') {
       const cast = tf.cast(tensor, 'float32');
       tf.dispose(tensor);
       tensor = cast;
     }
     return { tensor, canvas: (config.filter.return ? outCanvas : null) };
-  } else {
-    // check if resizing will be needed
-    if (typeof input['readyState'] !== 'undefined' && input['readyState'] <= 2) {
-      if (config.debug) log('input stream is not ready');
-      return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
-    }
-    const originalWidth = input['naturalWidth'] || input['videoWidth'] || input['width'] || (input['shape'] && (input['shape'][1] > 0));
-    const originalHeight = input['naturalHeight'] || input['videoHeight'] || input['height'] || (input['shape'] && (input['shape'][2] > 0));
-    if (!originalWidth || !originalHeight) {
-      if (config.debug) log('cannot determine input dimensions');
-      return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
-    }
-    let targetWidth = originalWidth;
-    let targetHeight = originalHeight;
-    if (targetWidth > maxSize) {
-      targetWidth = maxSize;
-      targetHeight = Math.trunc(targetWidth * originalHeight / originalWidth);
-    }
-    if (targetHeight > maxSize) {
-      targetHeight = maxSize;
-      targetWidth = Math.trunc(targetHeight * originalWidth / originalHeight);
-    }
-
-    // create our canvas and resize it if needed
-    if ((config.filter.width || 0) > 0) targetWidth = config.filter.width;
-    else if ((config.filter.height || 0) > 0) targetWidth = originalWidth * ((config.filter.height || 0) / originalHeight);
-    if ((config.filter.height || 0) > 0) targetHeight = config.filter.height;
-    else if ((config.filter.width || 0) > 0) targetHeight = originalHeight * ((config.filter.width || 0) / originalWidth);
-    if (!targetWidth || !targetHeight) throw new Error('input error: cannot determine dimension');
-    if (!inCanvas || (inCanvas?.width !== targetWidth) || (inCanvas?.height !== targetHeight)) inCanvas = canvas(targetWidth, targetHeight);
-
-    // draw input to our canvas
-    const inCtx = inCanvas.getContext('2d') as CanvasRenderingContext2D;
-    if ((typeof ImageData !== 'undefined') && (input instanceof ImageData)) {
-      inCtx.putImageData(input, 0, 0);
-    } else {
-      if (config.filter.flip && typeof inCtx.translate !== 'undefined') {
-        inCtx.translate(originalWidth, 0);
-        inCtx.scale(-1, 1);
-        inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
-        inCtx.setTransform(1, 0, 0, 1, 0, 0); // resets transforms to defaults
-      } else {
-        inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
-      }
-    }
-
-    if (!outCanvas || (inCanvas.width !== outCanvas.width) || (inCanvas?.height !== outCanvas?.height)) outCanvas = canvas(inCanvas.width, inCanvas.height); // init output canvas
-
-    // imagefx transforms using gl from input canvas to output canvas
-    if (config.filter.enabled && env.webgl.supported) {
-      if (!fx) fx = env.browser ? new fxImage.GLImageFilter() : null; // && (typeof document !== 'undefined')
-      env.filter = !!fx;
-      if (!fx || !fx.add) {
-        if (config.debug) log('input process error: cannot initialize filters');
-        env.webgl.supported = false;
-        config.filter.enabled = false;
-        copy(inCanvas, outCanvas); // filter failed to initialize
-        // return { tensor: null, canvas: inCanvas };
-      } else {
-        fx.reset();
-        if (config.filter.brightness !== 0) fx.add('brightness', config.filter.brightness);
-        if (config.filter.contrast !== 0) fx.add('contrast', config.filter.contrast);
-        if (config.filter.sharpness !== 0) fx.add('sharpen', config.filter.sharpness);
-        if (config.filter.blur !== 0) fx.add('blur', config.filter.blur);
-        if (config.filter.saturation !== 0) fx.add('saturation', config.filter.saturation);
-        if (config.filter.hue !== 0) fx.add('hue', config.filter.hue);
-        if (config.filter.negative) fx.add('negative');
-        if (config.filter.sepia) fx.add('sepia');
-        if (config.filter.vintage) fx.add('brownie');
-        if (config.filter.sepia) fx.add('sepia');
-        if (config.filter.kodachrome) fx.add('kodachrome');
-        if (config.filter.technicolor) fx.add('technicolor');
-        if (config.filter.polaroid) fx.add('polaroid');
-        if (config.filter.pixelate !== 0) fx.add('pixelate', config.filter.pixelate);
-        if (fx.get() > 0) outCanvas = fx.apply(inCanvas);
-        else outCanvas = fx.draw(inCanvas);
-      }
-    } else {
-      copy(inCanvas, outCanvas); // if no filters applied, output canvas is input canvas
-      if (fx) fx = null;
-      env.filter = !!fx;
-    }
-
-    if (!getTensor) return { tensor: null, canvas: outCanvas }; // just canvas was requested
-    if (!outCanvas) throw new Error('canvas error: cannot create output');
-
-    // create tensor from image unless input was a tensor already
-    let pixels;
-    let depth = 3;
-    if ((typeof ImageData !== 'undefined' && input instanceof ImageData) || (input['data'] && input['width'] && input['height'])) { // if input is imagedata, just use it
-      if (env.browser && tf.browser) {
-        pixels = tf.browser ? tf.browser.fromPixels(input) : null;
-      } else {
-        depth = input['data'].length / input['height'] / input['width'];
-        // const arr = Uint8Array.from(input['data']);
-        const arr = new Uint8Array(input['data']['buffer']);
-        pixels = tf.tensor(arr, [input['height'], input['width'], depth], 'int32');
-      }
-    } else {
-      if (!tmpCanvas || (outCanvas.width !== tmpCanvas.width) || (outCanvas.height !== tmpCanvas.height)) tmpCanvas = canvas(outCanvas.width, outCanvas.height); // init output canvas
-      if (tf.browser && env.browser) {
-        if (config.backend === 'webgl' || config.backend === 'humangl' || config.backend === 'webgpu') {
-          pixels = tf.browser.fromPixels(outCanvas); // safe to reuse since both backend and context are gl based
-        } else {
-          tmpCanvas = copy(outCanvas); // cannot use output canvas as it already has gl context so we do a silly one more canvas
-          pixels = tf.browser.fromPixels(tmpCanvas);
-        }
-      } else {
-        const tempCanvas = copy(outCanvas); // cannot use output canvas as it already has gl context so we do a silly one more canvas
-        const tempCtx = tempCanvas.getContext('2d') as CanvasRenderingContext2D;
-        const tempData = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
-        depth = tempData.data.length / targetWidth / targetHeight;
-        const arr = new Uint8Array(tempData.data.buffer);
-        pixels = tf.tensor(arr, [targetWidth, targetHeight, depth]);
-      }
-    }
-    if (depth === 4) { // rgba to rgb
-      const rgb = tf.slice3d(pixels, [0, 0, 0], [-1, -1, 3]); // strip alpha channel
-      tf.dispose(pixels);
-      pixels = rgb;
-    }
-    if (!pixels) throw new Error('input error: cannot create tensor');
-    const casted = tf.cast(pixels, 'float32');
-    const tensor = config.filter.equalization ? await enhance.histogramEqualization(casted) : tf.expandDims(casted, 0);
-    tf.dispose([pixels, casted]);
-    return { tensor, canvas: (config.filter.return ? outCanvas : null) };
   }
+  // check if resizing will be needed
+  if (typeof input['readyState'] !== 'undefined' && (input as HTMLMediaElement).readyState <= 2) {
+    if (config.debug) log('input stream is not ready');
+    return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
+  }
+  const originalWidth = input['naturalWidth'] || input['videoWidth'] || input['width'] || (input['shape'] && (input['shape'][1] > 0));
+  const originalHeight = input['naturalHeight'] || input['videoHeight'] || input['height'] || (input['shape'] && (input['shape'][2] > 0));
+  if (!originalWidth || !originalHeight) {
+    if (config.debug) log('cannot determine input dimensions');
+    return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
+  }
+  let targetWidth = originalWidth;
+  let targetHeight = originalHeight;
+  if (targetWidth > maxSize) {
+    targetWidth = maxSize;
+    targetHeight = Math.trunc(targetWidth * originalHeight / originalWidth);
+  }
+  if (targetHeight > maxSize) {
+    targetHeight = maxSize;
+    targetWidth = Math.trunc(targetHeight * originalWidth / originalHeight);
+  }
+
+  // create our canvas and resize it if needed
+  if ((config.filter.width || 0) > 0) targetWidth = config.filter.width;
+  else if ((config.filter.height || 0) > 0) targetWidth = originalWidth * ((config.filter.height || 0) / originalHeight);
+  if ((config.filter.height || 0) > 0) targetHeight = config.filter.height;
+  else if ((config.filter.width || 0) > 0) targetHeight = originalHeight * ((config.filter.width || 0) / originalWidth);
+  if (!targetWidth || !targetHeight) throw new Error('input error: cannot determine dimension');
+  if (!inCanvas || (inCanvas.width !== targetWidth) || (inCanvas.height !== targetHeight)) inCanvas = canvas(targetWidth, targetHeight);
+
+  // draw input to our canvas
+  const inCtx = inCanvas.getContext('2d') as CanvasRenderingContext2D;
+  if ((typeof ImageData !== 'undefined') && (input instanceof ImageData)) {
+    inCtx.putImageData(input, 0, 0);
+  } else {
+    if (config.filter.flip && typeof inCtx.translate !== 'undefined') {
+      inCtx.translate(originalWidth, 0);
+      inCtx.scale(-1, 1);
+      inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
+      inCtx.setTransform(1, 0, 0, 1, 0, 0); // resets transforms to defaults
+    } else {
+      inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
+    }
+  }
+
+  if (!outCanvas || (inCanvas.width !== outCanvas.width) || (inCanvas.height !== outCanvas.height)) outCanvas = canvas(inCanvas.width, inCanvas.height); // init output canvas
+
+  // imagefx transforms using gl from input canvas to output canvas
+  if (config.filter.enabled && env.webgl.supported) {
+    if (!fx) fx = env.browser ? new fxImage.GLImageFilter() : null; // && (typeof document !== 'undefined')
+    env.filter = !!fx;
+    if (!fx || !fx.add) {
+      if (config.debug) log('input process error: cannot initialize filters');
+      env.webgl.supported = false;
+      config.filter.enabled = false;
+      copy(inCanvas, outCanvas); // filter failed to initialize
+      // return { tensor: null, canvas: inCanvas };
+    } else {
+      fx.reset();
+      if (config.filter.brightness !== 0) fx.add('brightness', config.filter.brightness);
+      if (config.filter.contrast !== 0) fx.add('contrast', config.filter.contrast);
+      if (config.filter.sharpness !== 0) fx.add('sharpen', config.filter.sharpness);
+      if (config.filter.blur !== 0) fx.add('blur', config.filter.blur);
+      if (config.filter.saturation !== 0) fx.add('saturation', config.filter.saturation);
+      if (config.filter.hue !== 0) fx.add('hue', config.filter.hue);
+      if (config.filter.negative) fx.add('negative');
+      if (config.filter.sepia) fx.add('sepia');
+      if (config.filter.vintage) fx.add('brownie');
+      if (config.filter.sepia) fx.add('sepia');
+      if (config.filter.kodachrome) fx.add('kodachrome');
+      if (config.filter.technicolor) fx.add('technicolor');
+      if (config.filter.polaroid) fx.add('polaroid');
+      if (config.filter.pixelate !== 0) fx.add('pixelate', config.filter.pixelate);
+      if (fx.get() > 0) outCanvas = fx.apply(inCanvas);
+      else outCanvas = fx.draw(inCanvas);
+    }
+  } else {
+    copy(inCanvas, outCanvas); // if no filters applied, output canvas is input canvas
+    if (fx) fx = null;
+    env.filter = !!fx;
+  }
+
+  if (!getTensor) return { tensor: null, canvas: outCanvas }; // just canvas was requested
+  if (!outCanvas) throw new Error('canvas error: cannot create output');
+
+  // create tensor from image unless input was a tensor already
+  let pixels;
+  let depth = 3;
+  if ((typeof ImageData !== 'undefined' && input instanceof ImageData) || ((input as ImageData).data && (input as ImageData).width && (input as ImageData).height)) { // if input is imagedata, just use it
+    if (env.browser && tf.browser) {
+      pixels = tf.browser ? tf.browser.fromPixels(input) : null;
+    } else {
+      depth = (input as ImageData).data.length / (input as ImageData).height / (input as ImageData).width;
+      // const arr = Uint8Array.from(input['data']);
+      const arr = new Uint8Array((input as ImageData).data.buffer);
+      pixels = tf.tensor(arr, [(input as ImageData).height, (input as ImageData).width, depth], 'int32');
+    }
+  } else {
+    if (!tmpCanvas || (outCanvas.width !== tmpCanvas.width) || (outCanvas.height !== tmpCanvas.height)) tmpCanvas = canvas(outCanvas.width, outCanvas.height); // init output canvas
+    if (tf.browser && env.browser) {
+      if (config.backend === 'webgl' || config.backend === 'humangl' || config.backend === 'webgpu') {
+        pixels = tf.browser.fromPixels(outCanvas); // safe to reuse since both backend and context are gl based
+      } else {
+        tmpCanvas = copy(outCanvas); // cannot use output canvas as it already has gl context so we do a silly one more canvas
+        pixels = tf.browser.fromPixels(tmpCanvas);
+      }
+    } else {
+      const tempCanvas = copy(outCanvas); // cannot use output canvas as it already has gl context so we do a silly one more canvas
+      const tempCtx = tempCanvas.getContext('2d') as CanvasRenderingContext2D;
+      const tempData = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
+      depth = tempData.data.length / targetWidth / targetHeight;
+      const arr = new Uint8Array(tempData.data.buffer);
+      pixels = tf.tensor(arr, [targetWidth, targetHeight, depth]);
+    }
+  }
+  if (depth === 4) { // rgba to rgb
+    const rgb = tf.slice3d(pixels, [0, 0, 0], [-1, -1, 3]); // strip alpha channel
+    tf.dispose(pixels);
+    pixels = rgb;
+  }
+  if (!pixels) throw new Error('input error: cannot create tensor');
+  const casted = tf.cast(pixels, 'float32');
+  const tensor = config.filter.equalization ? await enhance.histogramEqualization(casted) : tf.expandDims(casted, 0);
+  tf.dispose([pixels, casted]);
+  return { tensor, canvas: (config.filter.return ? outCanvas : null) };
 }

 /*

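
Note: despite its size, the hunk above is mostly a pure de-nesting: the tensor branch ends in a `return`, so the `} else {` wrapper around the canvas path is dropped and its body dedented one level; the only substantive edits are `(input as HTMLMediaElement)` / `(input as ImageData)` casts replacing raw index lookups and the removal of now-provably-safe `?.` operators. The shape of the refactor, reduced to a sketch:

    function classify(input: unknown): string {
      if (typeof input === 'string') {
        return input; // this branch always returns...
      }
      // ...so the remainder needs no else wrapper and one less indent level
      return String(input);
    }
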
@@ -3,6 +3,8 @@
  * Based on: [WebGLImageFilter](https://github.com/phoboslab/WebGLImageFilter)
  */

+/* eslint-disable func-names */
+
 import * as shaders from './imagefxshaders';
 import { canvas } from './image';
 import { log } from '../util/util';
@@ -47,7 +49,7 @@ class GLProgram {
   }

   compile = (source, type): WebGLShader | null => {
-    const shader = this.gl.createShader(type) as WebGLShader;
+    const shader = this.gl.createShader(type);
     if (!shader) {
       log('filter: could not create shader');
       return null;
@@ -107,11 +109,11 @@ export function GLImageFilter() {
   }

   function createFramebufferTexture(width, height) {
-    const fbo = gl.createFramebuffer() as WebGLFramebuffer;
+    const fbo = gl.createFramebuffer();
     gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
     const renderbuffer = gl.createRenderbuffer();
     gl.bindRenderbuffer(gl.RENDERBUFFER, renderbuffer);
-    const texture = gl.createTexture() as WebGLTexture;
+    const texture = gl.createTexture();
     gl.bindTexture(gl.TEXTURE_2D, texture);
     gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
     gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
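
Note: dropping the `as WebGLFramebuffer` / `as WebGLTexture` casts relies on the lib.dom typings in use; depending on the TypeScript version, `gl.createFramebuffer()` and `gl.createTexture()` are typed either non-null or as `T | null`, and in the nullable case an explicit guard is the honest replacement for a cast. A sketch, assuming a `WebGL2RenderingContext`:

    function makeTexture(gl: WebGL2RenderingContext): WebGLTexture {
      const texture = gl.createTexture(); // may be typed nullable depending on lib.dom version
      if (!texture) throw new Error('filter: could not create texture');
      return texture;
    }
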
@@ -154,7 +156,7 @@ export function GLImageFilter() {
     if (shaderProgramCache[fragmentSource]) {
       currentProgram = shaderProgramCache[fragmentSource];
       gl.useProgram((currentProgram ? currentProgram.id : null) || null);
-      return currentProgram as GLProgram;
+      return currentProgram;
     }
     currentProgram = new GLProgram(gl, shaders.vertexIdentity, fragmentSource);
     if (!currentProgram) {
@@ -168,7 +170,7 @@ export function GLImageFilter() {
     gl.enableVertexAttribArray(currentProgram.attribute['uv']);
     gl.vertexAttribPointer(currentProgram.attribute['uv'], 2, gl.FLOAT, false, vertSize, 2 * floatSize);
     shaderProgramCache[fragmentSource] = currentProgram;
-    return currentProgram as GLProgram;
+    return currentProgram;
   }

   const filter = {
@@ -397,8 +399,7 @@ export function GLImageFilter() {

   // @ts-ignore this
   this.add = function (name) {
-    // eslint-disable-next-line prefer-rest-params
-    const args = Array.prototype.slice.call(arguments, 1);
+    const args = Array.prototype.slice.call(arguments, 1); // eslint-disable-line prefer-rest-params
     const func = filter[name];
     filterChain.push({ func, args });
   };

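
Note: `prefer-rest-params` flags any use of `arguments`; the modern equivalent collects trailing arguments into a real array. The code above keeps `arguments` and just relocates the suppression, but the form the rule asks for looks like this:

    function add(name: string, ...args: unknown[]): void {
      // args is already an array; no Array.prototype.slice.call(arguments, 1) needed
      console.log(name, args);
    }
    add('brightness', 0.5);
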
@@ -61,7 +61,7 @@ export class Models {
   antispoof: null | GraphModel | Promise<GraphModel> = null;
 }

-export type ModelStats = {
+export interface ModelStats {
   numLoadedModels: number,
   numEnabledModels: undefined,
   numDefinedModels: number,
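
Note: the `export type X = {...}` to `export interface X {...}` rewrites (here, and for KernelOps and ModelInfo below) match `@typescript-eslint/consistent-type-definitions` with its default "interface" option. For plain object shapes the two are interchangeable:

    interface StatsA { numLoadedModels: number }
    type StatsB = { numLoadedModels: number };
    const a: StatsA = { numLoadedModels: 3 };
    const b: StatsB = a; // structurally identical, assignable both ways
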
@@ -112,12 +112,11 @@ export async function load(instance: Human): Promise<void> {
       [instance.models.handpose, instance.models.handskeleton] = await handpose.load(instance.config);
     }
   }
-  if (instance.config.body.enabled && !instance.models.blazepose && instance.config.body?.modelPath?.includes('blazepose')) instance.models.blazepose = blazepose.loadPose(instance.config);
-  // @ts-ignore optional model
-  if (instance.config.body.enabled && !instance.models.blazeposedetect && instance.config.body['detector'] && instance.config.body['detector']['modelPath']) instance.models.blazeposedetect = blazepose.loadDetect(instance.config);
-  if (instance.config.body.enabled && !instance.models.efficientpose && instance.config.body?.modelPath?.includes('efficientpose')) instance.models.efficientpose = efficientpose.load(instance.config);
-  if (instance.config.body.enabled && !instance.models.movenet && instance.config.body?.modelPath?.includes('movenet')) instance.models.movenet = movenet.load(instance.config);
-  if (instance.config.body.enabled && !instance.models.posenet && instance.config.body?.modelPath?.includes('posenet')) instance.models.posenet = posenet.load(instance.config);
+  if (instance.config.body.enabled && !instance.models.blazepose && instance.config.body.modelPath?.includes('blazepose')) instance.models.blazepose = blazepose.loadPose(instance.config);
+  if (instance.config.body.enabled && !instance.models.blazeposedetect && instance.config.body['detector'] && instance.config.body['detector'].modelPath) instance.models.blazeposedetect = blazepose.loadDetect(instance.config);
+  if (instance.config.body.enabled && !instance.models.efficientpose && instance.config.body.modelPath?.includes('efficientpose')) instance.models.efficientpose = efficientpose.load(instance.config);
+  if (instance.config.body.enabled && !instance.models.movenet && instance.config.body.modelPath?.includes('movenet')) instance.models.movenet = movenet.load(instance.config);
+  if (instance.config.body.enabled && !instance.models.posenet && instance.config.body.modelPath?.includes('posenet')) instance.models.posenet = posenet.load(instance.config);
   if (instance.config.face.enabled && !instance.models.facedetect) instance.models.facedetect = blazeface.load(instance.config);
   if (instance.config.face.enabled && instance.config.face.antispoof?.enabled && !instance.models.antispoof) instance.models.antispoof = antispoof.load(instance.config);
   if (instance.config.face.enabled && instance.config.face.liveness?.enabled && !instance.models.liveness) instance.models.liveness = liveness.load(instance.config);
@@ -125,19 +124,15 @@ export async function load(instance: Human): Promise<void> {
   if (instance.config.face.enabled && instance.config.face.emotion?.enabled && !instance.models.emotion) instance.models.emotion = emotion.load(instance.config);
   if (instance.config.face.enabled && instance.config.face.iris?.enabled && !instance.config.face.attention?.enabled && !instance.models.faceiris) instance.models.faceiris = iris.load(instance.config);
   if (instance.config.face.enabled && instance.config.face.mesh?.enabled && !instance.models.facemesh) instance.models.facemesh = facemesh.load(instance.config);
-  // @ts-ignore optional model
   if (instance.config.face.enabled && instance.config.face['gear']?.enabled && !instance.models.gear) instance.models.gear = gear.load(instance.config);
-  // @ts-ignore optional model
   if (instance.config.face.enabled && instance.config.face['ssrnet']?.enabled && !instance.models.ssrnetage) instance.models.ssrnetage = ssrnetAge.load(instance.config);
-  // @ts-ignore optional model
   if (instance.config.face.enabled && instance.config.face['ssrnet']?.enabled && !instance.models.ssrnetgender) instance.models.ssrnetgender = ssrnetGender.load(instance.config);
-  // @ts-ignore optional model
   if (instance.config.face.enabled && instance.config.face['mobilefacenet']?.enabled && !instance.models.mobilefacenet) instance.models.mobilefacenet = mobilefacenet.load(instance.config);
   if (instance.config.face.enabled && instance.config.face['insightface']?.enabled && !instance.models.insightface) instance.models.insightface = insightface.load(instance.config);
   if (instance.config.hand.enabled && !instance.models.handtrack && instance.config.hand.detector?.modelPath?.includes('handtrack')) instance.models.handtrack = handtrack.loadDetect(instance.config);
   if (instance.config.hand.enabled && instance.config.hand.landmarks && !instance.models.handskeleton && instance.config.hand.detector?.modelPath?.includes('handtrack')) instance.models.handskeleton = handtrack.loadSkeleton(instance.config);
-  if (instance.config.object.enabled && !instance.models.centernet && instance.config.object?.modelPath?.includes('centernet')) instance.models.centernet = centernet.load(instance.config);
-  if (instance.config.object.enabled && !instance.models.nanodet && instance.config.object?.modelPath?.includes('nanodet')) instance.models.nanodet = nanodet.load(instance.config);
+  if (instance.config.object.enabled && !instance.models.centernet && instance.config.object.modelPath?.includes('centernet')) instance.models.centernet = centernet.load(instance.config);
+  if (instance.config.object.enabled && !instance.models.nanodet && instance.config.object.modelPath?.includes('nanodet')) instance.models.nanodet = nanodet.load(instance.config);
   if (instance.config.segmentation.enabled && !instance.models.segmentation) instance.models.segmentation = segmentation.load(instance.config);

   // models are loaded in parallel asynchronously so lets wait until they are actually loaded
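
Note: the deleted `// @ts-ignore optional model` markers were shielding lines that read optional model configs; those lines already use index access such as `instance.config.face['gear']`, which can typecheck without suppression where dot access on an undeclared property would not. A sketch with a hypothetical config shape:

    interface FaceConfig { enabled: boolean } // 'gear' deliberately not declared
    const face = { enabled: true } as FaceConfig & Record<string, any>;
    if (face['gear']?.enabled) console.log('optional gear model enabled'); // no @ts-ignore needed
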
@@ -149,7 +144,7 @@ export async function load(instance: Human): Promise<void> {
 }

 let instance: Human;
-export type KernelOps = { name: string, url: string, missing: string[], ops: string[] }
+export interface KernelOps { name: string, url: string, missing: string[], ops: string[] }

 export function validateModel(newInstance: Human | null, model: GraphModel | null, name: string): KernelOps | null {
   if (newInstance) instance = newInstance;
@@ -161,10 +156,8 @@ export function validateModel(newInstance: Human | null, model: GraphModel | nul
   const ops: string[] = [];
   const missing: string[] = [];
   interface Op { name: string, category: string, op: string }
-  // @ts-ignore // modelUrl is a private method
-  const url = model.modelUrl;
-  // @ts-ignore // executor is a private method
-  const executor = model.executor;
+  const url = model['modelUrl'] as string;
+  const executor = model['executor'];
   if (executor && executor.graph.nodes) {
     for (const kernel of Object.values(executor.graph.nodes)) {
       const op = (kernel as Op).op.toLowerCase();
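
Note: this hunk inverts the dot-notation cleanups seen elsewhere, because `modelUrl` and `executor` are private on tfjs's GraphModel: property access (`model.modelUrl`) trips the visibility check and needed `@ts-ignore`, while element access (`model['modelUrl']`) is TypeScript's sanctioned escape hatch. Reduced demonstration with a stand-in class:

    class ModelLike {
      private modelUrl = 'file://model.json';
    }
    const m = new ModelLike();
    // const bad = m.modelUrl;  // compile error: 'modelUrl' is private
    const url = m['modelUrl']; // element access bypasses the visibility check
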
@@ -187,9 +180,9 @@ export function validateModel(newInstance: Human | null, model: GraphModel | nul
   return missing.length > 0 ? { name, missing, ops, url } : null;
 }

-export function validate(newInstance: Human): Array<{ name: string, missing: string[] }> {
+export function validate(newInstance: Human): { name: string, missing: string[] }[] {
   instance = newInstance;
-  const missing: Array<KernelOps> = [];
+  const missing: KernelOps[] = [];
   for (const defined of Object.keys(instance.models)) {
     const model: GraphModel | null = instance.models[defined as keyof Models] as GraphModel | null;
     if (!model) continue;

@@ -33,7 +33,7 @@ export async function load(config: Config): Promise<GraphModel> {
 async function process(res: Tensor | null, outputShape: [number, number], config: Config) {
   if (!res) return [];
   const t: Record<string, Tensor> = {};
-  const results: Array<ObjectResult> = [];
+  const results: ObjectResult[] = [];
   const detections = await res.array() as number[][][];
   t.squeeze = tf.squeeze(res);
   const arr = tf.split(t.squeeze, 6, 1) as Tensor[]; // x1, y1, x2, y2, score, class

@@ -15,7 +15,7 @@ import type { Config } from '../config';
 import { env } from '../util/env';

 let model: GraphModel;
-let last: Array<ObjectResult> = [];
+let last: ObjectResult[] = [];
 let lastTime = 0;
 let skipped = Number.MAX_SAFE_INTEGER;
 let inputSize = 0;
@@ -33,7 +33,8 @@ export async function load(config: Config): Promise<GraphModel> {

 async function process(res: Tensor[], outputShape: [number, number], config: Config) {
   let id = 0;
-  let results: Array<ObjectResult> = [];
+  let results: ObjectResult[] = [];
+  const size = inputSize;
   for (const strideSize of [1, 2, 4]) { // try each stride size as it detects large/medium/small objects
     // find scores, boxes, classes
     const baseSize = strideSize * 13; // 13x13=169, 26x26=676, 52x52=2704
@@ -50,7 +51,7 @@ async function process(res: Tensor[], outputShape: [number, number], config: Con
       if (score > (config.object.minConfidence || 0) && j !== 61) {
         const cx = (0.5 + Math.trunc(i % baseSize)) / baseSize; // center.x normalized to range 0..1
         const cy = (0.5 + Math.trunc(i / baseSize)) / baseSize; // center.y normalized to range 0..1
-        const boxOffset = boxIdx[i].map((a: number) => a * (baseSize / strideSize / inputSize)); // just grab indexes of features with highest scores
+        const boxOffset = boxIdx[i].map((a: number) => a * (baseSize / strideSize / (size))); // just grab indexes of features with highest scores
         const [x, y] = [
           cx - (scaleBox / strideSize * boxOffset[0]),
           cy - (scaleBox / strideSize * boxOffset[1]),
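
Note: the new `const size = inputSize` line appears to exist for the benefit of the `.map()` callback above: `inputSize` is a mutable module-level `let`, and closing over it from inside the stride loop is the exact pattern `no-loop-func` warns about, so the value is snapshotted into a block-scoped `const` first. Minimal reproduction of the idea:

    let inputSize = 416;    // mutable module-level binding (mutated elsewhere at load time)
    const size = inputSize; // const snapshot: closures in the loop below capture an immutable binding
    for (const strideSize of [1, 2, 4]) {
      const scaled = [1, 2, 3].map((a) => a * (strideSize / size)); // safe for no-loop-func
      console.log(scaled);
    }
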
@@ -89,7 +90,7 @@ async function process(res: Tensor[], outputShape: [number, number], config: Con
       // unnecessary boxes and run nms only on good candidates (basically it just does IOU analysis as scores are already filtered)
       const nmsBoxes = results.map((a) => [a.boxRaw[1], a.boxRaw[0], a.boxRaw[3], a.boxRaw[2]]); // switches coordinates from x,y to y,x as expected by tf.nms
       const nmsScores = results.map((a) => a.score);
-      let nmsIdx: Array<number> = [];
+      let nmsIdx: number[] = [];
       if (nmsBoxes && nmsBoxes.length > 0) {
         const nms = await tf.image.nonMaxSuppressionAsync(nmsBoxes, nmsScores, config.object.maxDetected, config.object.iouThreshold, config.object.minConfidence);
         nmsIdx = await nms.data();

@@ -38,9 +38,9 @@ export interface FaceResult {
   /** detected face box normalized to 0..1 */
   boxRaw: Box,
   /** detected face mesh */
-  mesh: Array<Point>
+  mesh: Point[]
   /** detected face mesh normalized to 0..1 */
-  meshRaw: Array<Point>,
+  meshRaw: Point[],
   /** face contours as array of 2d points normalized to 0..1 */
   // contoursRaw: Array<[number, number]>,
   /** face contours as array of 2d points */
@@ -54,11 +54,11 @@ export interface FaceResult {
   /** gender detection score */
   genderScore?: number,
   /** detected emotions */
-  emotion?: Array<{ score: number, emotion: Emotion }>,
+  emotion?: { score: number, emotion: Emotion }[],
   /** detected race */
-  race?: Array<{ score: number, race: Race }>,
+  race?: { score: number, race: Race }[],
   /** face descriptor */
-  embedding?: Array<number>,
+  embedding?: number[],
   /** face iris distance from camera */
   iris?: number,
   /** face anti-spoofing result confidence */
@@ -111,7 +111,7 @@ export interface BodyResult {
   /** detected body box normalized to 0..1 */
   boxRaw: Box,
   /** detected body keypoints */
-  keypoints: Array<BodyKeypoint>
+  keypoints: BodyKeypoint[]
   /** detected body keypoints combined into annotated parts */
   annotations: Record<BodyAnnotation, Point[][]>,
 }
@@ -136,11 +136,11 @@ export interface HandResult {
   /** detected hand box normalized to 0..1 */
   boxRaw: Box,
   /** detected hand keypoints */
-  keypoints: Array<Point>,
+  keypoints: Point[],
   /** detected hand class */
   label: HandType,
   /** detected hand keypoints combined into annotated parts */
-  annotations: Record<Finger, Array<Point>>,
+  annotations: Record<Finger, Point[]>,
   /** detected hand parts annotated with part gestures */
   landmarks: Record<Finger, { curl: FingerCurl, direction: FingerDirection }>,
 }
@@ -192,7 +192,7 @@ export interface PersonResult {
   /** left and right hand results that belong to this person */
   hands: { left: HandResult | null, right: HandResult | null },
   /** detected gestures specific to this person */
-  gestures: Array<GestureResult>,
+  gestures: GestureResult[],
   /** box that defines the person */
   box: Box,
   /** box that defines the person normalized to 0..1 */
@@ -206,15 +206,15 @@ export interface PersonResult {
  */
 export interface Result {
   /** {@link FaceResult}: detection & analysis results */
-  face: Array<FaceResult>,
+  face: FaceResult[],
   /** {@link BodyResult}: detection & analysis results */
-  body: Array<BodyResult>,
+  body: BodyResult[],
   /** {@link HandResult}: detection & analysis results */
-  hand: Array<HandResult>,
+  hand: HandResult[],
   /** {@link GestureResult}: detection & analysis results */
-  gesture: Array<GestureResult>,
+  gesture: GestureResult[],
   /** {@link ObjectResult}: detection & analysis results */
-  object: Array<ObjectResult>
+  object: ObjectResult[]
   /** global performance object with timing values for each operation */
   performance: Record<string, number>,
   /** optional processed canvas that can be used to draw input on screen */
@@ -222,7 +222,7 @@ export interface Result {
   /** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
   readonly timestamp: number,
   /** getter property that returns unified persons object */
-  persons: Array<PersonResult>,
+  persons: PersonResult[],
   /** Last known error message */
   error: string | null;
 }

@@ -52,18 +52,17 @@ export async function check(instance: Human, force = false) {

   // handle webgpu
   if (env.browser && instance.config.backend === 'webgpu') {
-    if (typeof navigator === 'undefined' || typeof navigator['gpu'] === 'undefined') {
+    if (typeof navigator === 'undefined' || typeof navigator.gpu === 'undefined') {
       log('override: backend set to webgpu but browser does not support webgpu');
       instance.config.backend = 'humangl';
     } else {
-      const adapter = await navigator['gpu'].requestAdapter();
+      const adapter = await navigator.gpu.requestAdapter();
       if (instance.config.debug) log('enumerated webgpu adapter:', adapter);
       if (!adapter) {
         log('override: backend set to webgpu but browser reports no available gpu');
         instance.config.backend = 'humangl';
       } else {
         // @ts-ignore requestAdapterInfo is not in tslib
-        // eslint-disable-next-line no-undef
         const adapterInfo = 'requestAdapterInfo' in adapter ? await (adapter as GPUAdapter).requestAdapterInfo() : undefined;
         // if (adapter.features) adapter.features.forEach((feature) => log('webgpu features:', feature));
         log('webgpu adapter info:', adapterInfo);
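
Note: `navigator['gpu']` to `navigator.gpu` only compiles when WebGPU typings are in scope (for example from @webgpu/types, which also supplies the `GPUAdapter` cast two lines down); the bracket form was the workaround while the API was untyped. A guarded capability probe under that assumption:

    async function hasWebGPU(): Promise<boolean> {
      if (typeof navigator === 'undefined' || typeof navigator.gpu === 'undefined') return false;
      const adapter = await navigator.gpu.requestAdapter(); // resolves to null when no suitable gpu exists
      return adapter !== null;
    }
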
@@ -86,9 +85,9 @@ export async function check(instance: Human, force = false) {

   // customize wasm
   if (instance.config.backend === 'wasm') {
-    if (tf.env().flagRegistry['CANVAS2D_WILL_READ_FREQUENTLY']) tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
+    if (tf.env().flagRegistry.CANVAS2D_WILL_READ_FREQUENTLY) tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
     if (instance.config.debug) log('wasm path:', instance.config.wasmPath);
-    if (typeof tf?.setWasmPaths !== 'undefined') await tf.setWasmPaths(instance.config.wasmPath, instance.config.wasmPlatformFetch);
+    if (typeof tf.setWasmPaths !== 'undefined') await tf.setWasmPaths(instance.config.wasmPath, instance.config.wasmPlatformFetch);
     else throw new Error('backend error: attempting to use wasm backend but wasm path is not set');
     let mt = false;
     let simd = false;
@@ -114,15 +113,15 @@ export async function check(instance: Human, force = false) {

   // customize humangl
   if (tf.getBackend() === 'humangl') {
-    if (tf.env().flagRegistry['CHECK_COMPUTATION_FOR_ERRORS']) tf.env().set('CHECK_COMPUTATION_FOR_ERRORS', false);
-    if (tf.env().flagRegistry['WEBGL_CPU_FORWARD']) tf.env().set('WEBGL_CPU_FORWARD', true);
-    if (tf.env().flagRegistry['WEBGL_USE_SHAPES_UNIFORMS']) tf.env().set('WEBGL_USE_SHAPES_UNIFORMS', true);
-    if (tf.env().flagRegistry['CPU_HANDOFF_SIZE_THRESHOLD']) tf.env().set('CPU_HANDOFF_SIZE_THRESHOLD', 256);
-    if (tf.env().flagRegistry['WEBGL_EXP_CONV']) tf.env().set('WEBGL_EXP_CONV', true); // <https://github.com/tensorflow/tfjs/issues/6678>
-    if (tf.env().flagRegistry['USE_SETTIMEOUTCUSTOM']) tf.env().set('USE_SETTIMEOUTCUSTOM', true); // <https://github.com/tensorflow/tfjs/issues/6687>
+    if (tf.env().flagRegistry.CHECK_COMPUTATION_FOR_ERRORS) tf.env().set('CHECK_COMPUTATION_FOR_ERRORS', false);
+    if (tf.env().flagRegistry.WEBGL_CPU_FORWARD) tf.env().set('WEBGL_CPU_FORWARD', true);
+    if (tf.env().flagRegistry.WEBGL_USE_SHAPES_UNIFORMS) tf.env().set('WEBGL_USE_SHAPES_UNIFORMS', true);
+    if (tf.env().flagRegistry.CPU_HANDOFF_SIZE_THRESHOLD) tf.env().set('CPU_HANDOFF_SIZE_THRESHOLD', 256);
+    if (tf.env().flagRegistry.WEBGL_EXP_CONV) tf.env().set('WEBGL_EXP_CONV', true); // <https://github.com/tensorflow/tfjs/issues/6678>
+    if (tf.env().flagRegistry.USE_SETTIMEOUTCUSTOM) tf.env().set('USE_SETTIMEOUTCUSTOM', true); // <https://github.com/tensorflow/tfjs/issues/6687>
     // if (tf.env().flagRegistry['WEBGL_PACK_DEPTHWISECONV']) tf.env().set('WEBGL_PACK_DEPTHWISECONV', false);
     // if (if (tf.env().flagRegistry['WEBGL_FORCE_F16_TEXTURES']) && !instance.config.object.enabled) tf.env().set('WEBGL_FORCE_F16_TEXTURES', true); // safe to use 16bit precision
-    if (typeof instance.config['deallocate'] !== 'undefined' && instance.config['deallocate']) { // hidden param
+    if (typeof instance.config.deallocate !== 'undefined' && instance.config.deallocate) { // hidden param
       log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', true);
       tf.env().set('WEBGL_DELETE_TEXTURE_THRESHOLD', 0);
     }
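
Note: the flag block is the same guard-then-set idiom throughout, now with dot access; checking `tf.env().flagRegistry` first matters because these flags are registered per backend build, and `tf.env().set()` throws for a flag the build has not registered. Generalized:

    import * as tf from '@tensorflow/tfjs';

    function setFlagIfRegistered(name: string, value: boolean | number): void {
      if (tf.env().flagRegistry[name]) tf.env().set(name, value); // only touch flags this build registers
    }
    setFlagIfRegistered('WEBGL_EXP_CONV', true);
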
@@ -154,7 +153,7 @@ export async function check(instance: Human, force = false) {
 }

 // register fake missing tfjs ops
-export function fakeOps(kernelNames: Array<string>, config) {
+export function fakeOps(kernelNames: string[], config) {
   // if (config.debug) log('registerKernel:', kernelNames);
   for (const kernelName of kernelNames) {
     const kernelConfig = {

@@ -13,7 +13,7 @@ export const config = {
   priority: 999,
   canvas: <null | AnyCanvas>null,
   gl: <null | WebGL2RenderingContext>null,
-  extensions: <string[]> [],
+  extensions: <string[] | null> [],
   webGLattr: { // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.2
     alpha: false,
     antialias: false,
@@ -33,7 +33,7 @@ function extensions(): void {
  */
   const gl = config.gl;
   if (!gl) return;
-  config.extensions = gl.getSupportedExtensions() as string[];
+  config.extensions = gl.getSupportedExtensions();
   // gl.getExtension('KHR_parallel_shader_compile');
 }

@@ -62,7 +62,11 @@ export async function register(instance: Human): Promise<void> {
     return;
   }
   try {
-    config.gl = config.canvas?.getContext('webgl2', config.webGLattr) as WebGL2RenderingContext;
+    config.gl = config.canvas.getContext('webgl2', config.webGLattr);
+    if (!config.gl) {
+      log('error: cannot get WebGL context');
+      return;
+    }
     const glv2 = config.gl.getParameter(config.gl.VERSION).includes('2.0');
     if (!glv2) {
       log('override: using fallback webgl backend as webgl 2.0 is not detected');
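
Note: `canvas.getContext('webgl2', ...)` is typed (and behaves) as nullable, so the old `as WebGL2RenderingContext` cast was hiding a real failure mode; the added `if (!config.gl)` branch is what makes the cast removable. The same pattern in isolation:

    function getGL(canvas: HTMLCanvasElement): WebGL2RenderingContext | null {
      const gl = canvas.getContext('webgl2'); // typed WebGL2RenderingContext | null, no cast needed
      if (!gl) console.log('error: cannot get WebGL context');
      return gl;
    }
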
@@ -123,7 +127,7 @@ export async function register(instance: Human): Promise<void> {
     return;
   }
   try {
-    if (tf.env().flagRegistry['WEBGL_VERSION']) tf.env().set('WEBGL_VERSION', 2);
+    if (tf.env().flagRegistry.WEBGL_VERSION) tf.env().set('WEBGL_VERSION', 2);
   } catch (err) {
     log('error: cannot set WebGL backend flags:', err);
     return;
@@ -13,7 +13,7 @@ const options = {
   modelBasePath: '',
 };

-export type ModelInfo = {
+export interface ModelInfo {
   name: string,
   inCache: boolean,
   sizeDesired: number,
@@ -67,7 +67,7 @@ export async function loadModel(modelPath: string | undefined): Promise<GraphMod
       modelStats[shortModelName].sizeFromManifest = artifacts?.weightData?.byteLength || 0;
       model.loadSync(artifacts); // load weights
       // @ts-ignore private property
-      modelStats[shortModelName].sizeLoadedWeights = model?.artifacts?.weightData?.byteLength || 0;
+      modelStats[shortModelName].sizeLoadedWeights = model.artifacts?.weightData?.byteLength || 0;
       if (options.verbose) log('load model:', model['modelUrl'], { bytes: modelStats[shortModelName].sizeLoadedWeights }, options);
       loaded = true;
     } catch (err) {
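Note: the stats bookkeeping above reads weightData byte length off the loaded model. A hedged sketch of the same measurement using the public loadGraphModel entry point; the url is a placeholder, and artifacts is a private property, hence the cast:

import * as tf from '@tensorflow/tfjs';

// load a graph model and report how many weight bytes actually landed in memory
async function loadWithStats(url: string): Promise<tf.GraphModel> {
  const model = await tf.loadGraphModel(url);
  const bytes = (model as any).artifacts?.weightData?.byteLength || 0; // private property, not public API
  console.log('load model:', url, { bytes });
  return model;
}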
@@ -1,4 +1,3 @@
-// @ts-nocheck
 /* eslint-disable */

 // explicit copy of @types/offscreencanvas to enable typedef bundling
@@ -11,7 +10,7 @@
 // TypeScript Version: 4.3

 // https://html.spec.whatwg.org/multipage/canvas.html#dom-canvas-transfercontroltooffscreen
-interface HTMLCanvasElement {
+export interface HTMLCanvasElement {
   transferControlToOffscreen(): OffscreenCanvas;
 }

@@ -63,19 +62,19 @@ declare function createImageBitmap(image: ImageBitmapSource | OffscreenCanvas, s
   sw: number, sh: number): Promise<ImageBitmap>;

 // OffscreenCanvas should be a part of Transferable => extend all postMessage methods
-interface Worker {
+export interface Worker {
   postMessage(message: any, transfer?: Array<Transferable | OffscreenCanvas>): void;
 }

-interface ServiceWorker {
+export interface ServiceWorker {
   postMessage(message: any, transfer?: Array<Transferable | OffscreenCanvas>): void;
 }

-interface MessagePort {
+export interface MessagePort {
   postMessage(message: any, transfer?: Array<Transferable | OffscreenCanvas>): void;
 }

-interface Window {
+export interface Window {
   postMessage(message: any, targetOrigin: string, transfer?: Array<Transferable | OffscreenCanvas>): void;
 }

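Note: these typedef extensions exist so an OffscreenCanvas can be posted to workers as a transferable. A short usage sketch of that pattern; 'render.worker.js' is a placeholder script name:

// hand rendering control of a DOM canvas to a worker thread
const el = document.createElement('canvas');
const offscreen = el.transferControlToOffscreen();
const worker = new Worker('render.worker.js');
worker.postMessage({ canvas: offscreen }, [offscreen]); // second argument transfers ownership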
@@ -1,3 +1,5 @@
+/* eslint-disable import/no-extraneous-dependencies */
+
 export declare const version: {
   'tfjs-core': string;
   'tfjs-backend-cpu': string;
@@ -7,11 +9,12 @@ export declare const version: {
   'tfjs-converter': string;
   tfjs: string;
 };

 export * from '@tensorflow/tfjs-core';
 export * from '@tensorflow/tfjs-converter';
 export * from '@tensorflow/tfjs-data';
-// export * from "@tensorflow/tfjs-layers";
-// export * from "@tensorflow/tfjs-backend-cpu";
-// export * from "@tensorflow/tfjs-backend-wasm";
-// export * from "@tensorflow/tfjs-backend-webgl";
+export * from '@tensorflow/tfjs-layers';
+export * from '@tensorflow/tfjs-backend-cpu';
+export * from '@tensorflow/tfjs-backend-wasm';
+export * from '@tensorflow/tfjs-backend-webgl';
 export {};
@@ -4,16 +4,15 @@
  * TensorFlow Tensor type
  * @external
  */
-export type { Tensor, TensorLike, Rank } from '@tensorflow/tfjs-core/dist/index';
+export type { Tensor, TensorLike, Rank } from '@tensorflow/tfjs-core/dist/index'; // eslint-disable-line import/no-extraneous-dependencies

 /**
  * TensorFlow GraphModel type
  * @external
  */
-export type { GraphModel } from '@tensorflow/tfjs-converter/dist/index';
+export type { GraphModel } from '@tensorflow/tfjs-converter/dist/index'; // eslint-disable-line import/no-extraneous-dependencies

 /** Tensorflow Long type
  * @external long
  */
-// eslint-disable-next-line node/no-missing-import
 // export type { Long } from 'long';
@@ -1,6 +1,6 @@
 import type { Point, Box } from '../result';

-export function calc(keypoints: Array<Point>, outputSize: [number, number] = [1, 1]) {
+export function calc(keypoints: Point[], outputSize: [number, number] = [1, 1]) {
   const coords = [keypoints.map((pt) => pt[0]), keypoints.map((pt) => pt[1])]; // all x/y coords
   const min = [Math.min(...coords[0]), Math.min(...coords[1])];
   const max = [Math.max(...coords[0]), Math.max(...coords[1])];
@@ -9,7 +9,7 @@ export function calc(keypoints: Array<Point>, outputSize: [number, number] = [1,
   return { box, boxRaw };
 }

-export function square(keypoints: Array<Point>, outputSize: [number, number] = [1, 1]) {
+export function square(keypoints: Point[], outputSize: [number, number] = [1, 1]) {
   const coords = [keypoints.map((pt) => pt[0]), keypoints.map((pt) => pt[1])]; // all x/y coords
   const min = [Math.min(...coords[0]), Math.min(...coords[1])];
   const max = [Math.max(...coords[0]), Math.max(...coords[1])];
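Note: both calc() and square() reduce a keypoint list to min/max coordinates. A self-contained sketch of that computation with a worked example (output scaling omitted):

type Point = [number, number, number?];
type Box = [number, number, number, number]; // x, y, width, height

// tightest box enclosing all keypoints
function boxFromPoints(keypoints: Point[]): Box {
  const xs = keypoints.map((pt) => pt[0]);
  const ys = keypoints.map((pt) => pt[1]);
  const minX = Math.min(...xs);
  const minY = Math.min(...ys);
  return [minX, minY, Math.max(...xs) - minX, Math.max(...ys) - minY];
}

console.log(boxFromPoints([[10, 20], [30, 60], [25, 40]])); // [10, 20, 20, 40]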
@@ -124,8 +124,8 @@ export class Env {
     // analyze backends
     this.backends = Object.keys(tf.engine().registryFactory);
     this.tensorflow = {
-      version: (tf.backend()['binding'] ? tf.backend()['binding']['TF_Version'] : undefined),
-      gpu: (tf.backend()['binding'] ? tf.backend()['binding'].isUsingGpuDevice() : undefined),
+      version: (tf.backend().binding ? tf.backend().binding.TF_Version : undefined),
+      gpu: (tf.backend().binding ? tf.backend().binding.isUsingGpuDevice() : undefined),
     };
     this.wasm.supported = typeof WebAssembly !== 'undefined';
     this.wasm.backend = this.backends.includes('wasm');
@@ -139,19 +139,19 @@ export class Env {
     this.webgl.supported = typeof ctx !== 'undefined';
     this.webgl.backend = this.backends.includes('webgl');
     if (this.webgl.supported && this.webgl.backend && (tf.getBackend() === 'webgl' || tf.getBackend() === 'humangl')) {
-      // @ts-ignore getGPGPUContext only exists on WebGL backend
       const gl = tf.backend().gpgpu !== 'undefined' ? await tf.backend().getGPGPUContext().gl : null;
       if (gl) {
         this.webgl.version = gl.getParameter(gl.VERSION);
         this.webgl.renderer = gl.getParameter(gl.RENDERER);
       }
     }
-    // @ts-ignore navigator.gpu is only defined when webgpu is available in browser
-    this.webgpu.supported = this.browser && typeof navigator['gpu'] !== 'undefined';
+    this.webgpu.supported = this.browser && typeof navigator.gpu !== 'undefined';
     this.webgpu.backend = this.backends.includes('webgpu');
     try {
-      // @ts-ignore navigator.gpu is only defined when webgpu is available in browser
-      if (this.webgpu.supported) this.webgpu.adapter = (await navigator['gpu'].requestAdapter()).name;
+      if (this.webgpu.supported) {
+        const adapter = await navigator.gpu.requestAdapter();
+        this.webgpu.adapter = adapter ? adapter.name : undefined;
+      }
     } catch {
       this.webgpu.supported = false;
     }
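Note: the rewrite above stops dereferencing requestAdapter() blindly, since it resolves to null when no suitable GPU exists. A standalone sketch of the same probe; the any casts stand in for WebGPU type definitions that may not be present in the compile target:

// feature-detect webgpu, then ask for an adapter, which may still be null
async function detectWebGPU(): Promise<string | undefined> {
  if (typeof navigator === 'undefined' || typeof (navigator as any).gpu === 'undefined') return undefined;
  try {
    const adapter = await (navigator as any).gpu.requestAdapter();
    return adapter ? adapter.name : undefined;
  } catch {
    return undefined;
  }
}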
@@ -175,8 +175,8 @@ export class Env {
     } catch { }
     */
   }
-    if (!this['cpu']) Object.defineProperty(this, 'cpu', { value: cpu });
-    else this['cpu'] = cpu;
+    if (!this.cpu) Object.defineProperty(this, 'cpu', { value: cpu });
+    else this.cpu = cpu;
   }
 }

@@ -36,7 +36,7 @@ export function calc(newResult: Result, config: Config): Result {

   // interpolate body results
   if (!bufferedResult.body || (newResult.body.length !== bufferedResult.body.length)) {
-    bufferedResult.body = JSON.parse(JSON.stringify(newResult.body as BodyResult[])); // deep clone once
+    bufferedResult.body = JSON.parse(JSON.stringify(newResult.body)) as BodyResult[]; // deep clone once
   } else {
     for (let i = 0; i < newResult.body.length; i++) {
       const box = newResult.body[i].box // update box
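Note: this hunk (and the matching hand, face, object, and person hunks below) moves the cast outside the JSON round-trip, which is where it belongs: the clone produces plain data, and the cast re-applies the element type. A tiny sketch of the pattern:

// deep-clone-once via JSON round-trip; only valid for plain serializable data
function deepClone<T>(value: T): T {
  return JSON.parse(JSON.stringify(value)) as T;
}

const buffered = deepClone<{ score: number }[]>([{ score: 0.9 }]);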
@@ -46,7 +46,7 @@ export function calc(newResult: Result, config: Config): Result {
       const keypoints = (newResult.body[i].keypoints // update keypoints
         .map((newKpt, j) => ({
           score: newKpt.score,
-          part: newKpt.part as BodyLandmark,
+          part: newKpt.part,
           position: [
             bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * (bufferedResult.body[i].keypoints[j].position[0] || 0) + (newKpt.position[0] || 0)) / bufferedFactor : newKpt.position[0],
             bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * (bufferedResult.body[i].keypoints[j].position[1] || 0) + (newKpt.position[1] || 0)) / bufferedFactor : newKpt.position[1],
@@ -62,15 +62,15 @@ export function calc(newResult: Result, config: Config): Result {
             bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * (bufferedResult.body[i].keypoints[j].distance?.[1] || 0) + (newKpt.distance?.[1] || 0)) / bufferedFactor : newKpt.distance?.[1],
             bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * (bufferedResult.body[i].keypoints[j].distance?.[2] || 0) + (newKpt.distance?.[2] || 0)) / bufferedFactor : newKpt.distance?.[2],
           ],
-        }))) as Array<{ score: number, part: BodyLandmark, position: [number, number, number?], positionRaw: [number, number, number?] }>;
+        }))) as { score: number, part: BodyLandmark, position: [number, number, number?], positionRaw: [number, number, number?] }[];

       const annotations: Record<BodyAnnotation, Point[][]> = {} as Record<BodyAnnotation, Point[][]>; // recreate annotations
       let coords = { connected: {} };
-      if (config.body?.modelPath?.includes('efficientpose')) coords = efficientPoseCoords;
-      else if (config.body?.modelPath?.includes('blazepose')) coords = blazePoseCoords;
-      else if (config.body?.modelPath?.includes('movenet')) coords = moveNetCoords;
+      if (config.body.modelPath?.includes('efficientpose')) coords = efficientPoseCoords;
+      else if (config.body.modelPath?.includes('blazepose')) coords = blazePoseCoords;
+      else if (config.body.modelPath?.includes('movenet')) coords = moveNetCoords;
       for (const [name, indexes] of Object.entries(coords.connected as Record<string, string[]>)) {
-        const pt: Array<Point[]> = [];
+        const pt: Point[][] = [];
         for (let j = 0; j < indexes.length - 1; j++) {
           const pt0 = keypoints.find((kp) => kp.part === indexes[j]);
           const pt1 = keypoints.find((kp) => kp.part === indexes[j + 1]);
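Note: every interpolated value above uses the same weighted running average, with bufferedFactor controlling inertia. A worked sketch of that formula:

// running average: higher bufferedFactor reacts more slowly to new values
function smooth(prev: number, next: number, bufferedFactor: number): number {
  return ((bufferedFactor - 1) * prev + next) / bufferedFactor;
}

console.log(smooth(100, 200, 4)); // 125, i.e. a quarter of the way toward the new value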
@@ -79,13 +79,13 @@ export function calc(newResult: Result, config: Config): Result {
         }
         annotations[name] = pt;
       }
-      bufferedResult.body[i] = { ...newResult.body[i], box, boxRaw, keypoints, annotations: annotations as BodyResult['annotations'] }; // shallow clone plus updated values
+      bufferedResult.body[i] = { ...newResult.body[i], box, boxRaw, keypoints, annotations }; // shallow clone plus updated values
     }
   }

   // interpolate hand results
   if (!bufferedResult.hand || (newResult.hand.length !== bufferedResult.hand.length)) {
-    bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand as HandResult[])); // deep clone once
+    bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand)); // deep clone once
   } else {
     for (let i = 0; i < newResult.hand.length; i++) {
       const box = (newResult.hand[i].box// update box
@@ -116,7 +116,7 @@ export function calc(newResult: Result, config: Config): Result {

   // interpolate face results
   if (!bufferedResult.face || (newResult.face.length !== bufferedResult.face.length)) {
-    bufferedResult.face = JSON.parse(JSON.stringify(newResult.face as FaceResult[])); // deep clone once
+    bufferedResult.face = JSON.parse(JSON.stringify(newResult.face)) as FaceResult[]; // deep clone once
   } else {
     for (let i = 0; i < newResult.face.length; i++) {
       const box = (newResult.face[i].box // update box
@@ -131,14 +131,14 @@ export function calc(newResult: Result, config: Config): Result {
       } = { matrix: [0, 0, 0, 0, 0, 0, 0, 0, 0], angle: { roll: 0, yaw: 0, pitch: 0 }, gaze: { bearing: 0, strength: 0 } };
       rotation.matrix = newResult.face[i].rotation?.matrix as [number, number, number, number, number, number, number, number, number];
       rotation.angle = {
-        roll: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle?.roll || 0) + (newResult.face[i].rotation?.angle?.roll || 0)) / bufferedFactor,
-        yaw: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle?.yaw || 0) + (newResult.face[i].rotation?.angle?.yaw || 0)) / bufferedFactor,
-        pitch: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle?.pitch || 0) + (newResult.face[i].rotation?.angle?.pitch || 0)) / bufferedFactor,
+        roll: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle.roll || 0) + (newResult.face[i].rotation?.angle.roll || 0)) / bufferedFactor,
+        yaw: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle.yaw || 0) + (newResult.face[i].rotation?.angle.yaw || 0)) / bufferedFactor,
+        pitch: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.angle.pitch || 0) + (newResult.face[i].rotation?.angle.pitch || 0)) / bufferedFactor,
       };
       rotation.gaze = {
         // not fully correct due projection on circle, also causes wrap-around draw on jump from negative to positive
-        bearing: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.gaze?.bearing || 0) + (newResult.face[i].rotation?.gaze?.bearing || 0)) / bufferedFactor,
-        strength: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.gaze?.strength || 0) + (newResult.face[i].rotation?.gaze?.strength || 0)) / bufferedFactor,
+        bearing: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.gaze.bearing || 0) + (newResult.face[i].rotation?.gaze.bearing || 0)) / bufferedFactor,
+        strength: ((bufferedFactor - 1) * (bufferedResult.face[i].rotation?.gaze.strength || 0) + (newResult.face[i].rotation?.gaze.strength || 0)) / bufferedFactor,
       };
       bufferedResult.face[i] = { ...newResult.face[i], rotation, box, boxRaw }; // shallow clone plus updated values
     }
@@ -148,7 +148,7 @@ export function calc(newResult: Result, config: Config): Result {

   // interpolate object detection results
   if (!bufferedResult.object || (newResult.object.length !== bufferedResult.object.length)) {
-    bufferedResult.object = JSON.parse(JSON.stringify(newResult.object as ObjectResult[])); // deep clone once
+    bufferedResult.object = JSON.parse(JSON.stringify(newResult.object)) as ObjectResult[]; // deep clone once
   } else {
     for (let i = 0; i < newResult.object.length; i++) {
       const box = (newResult.object[i].box // update box
@@ -163,7 +163,7 @@ export function calc(newResult: Result, config: Config): Result {
   if (newResult.persons) {
     const newPersons = newResult.persons; // trigger getter function
     if (!bufferedResult.persons || (newPersons.length !== bufferedResult.persons.length)) {
-      bufferedResult.persons = JSON.parse(JSON.stringify(newPersons as PersonResult[]));
+      bufferedResult.persons = JSON.parse(JSON.stringify(newPersons)) as PersonResult[];
     } else {
       for (let i = 0; i < newPersons.length; i++) { // update person box, we don't update the rest as it's updated as reference anyhow
         bufferedResult.persons[i].box = (newPersons[i].box
@@ -4,9 +4,9 @@

 import type { FaceResult, BodyResult, HandResult, GestureResult, PersonResult, Box } from '../result';

-export function join(faces: Array<FaceResult>, bodies: Array<BodyResult>, hands: Array<HandResult>, gestures: Array<GestureResult>, shape: Array<number> | undefined): Array<PersonResult> {
+export function join(faces: FaceResult[], bodies: BodyResult[], hands: HandResult[], gestures: GestureResult[], shape: number[] | undefined): PersonResult[] {
   let id = 0;
-  const persons: Array<PersonResult> = [];
+  const persons: PersonResult[] = [];
   for (const face of faces) { // person is defined primarily by face and then we append other objects as found
     const person: PersonResult = { id: id++, face, body: null, hands: { left: null, right: null }, gestures: [], box: [0, 0, 0, 0] };
     for (const body of bodies) {
@@ -34,11 +34,11 @@ export function join(faces: Array<FaceResult>, bodies: Array<BodyResult>, hands:
       }
     }
     for (const gesture of gestures) { // append all gestures according to ids
-      if (gesture['face'] !== undefined && gesture['face'] === face.id) person.gestures?.push(gesture);
-      else if (gesture['iris'] !== undefined && gesture['iris'] === face.id) person.gestures?.push(gesture);
-      else if (gesture['body'] !== undefined && gesture['body'] === person.body?.id) person.gestures?.push(gesture);
-      else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.left?.id) person.gestures?.push(gesture);
-      else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.right?.id) person.gestures?.push(gesture);
+      if (gesture['face'] !== undefined && gesture['face'] === face.id) person.gestures.push(gesture);
+      else if (gesture['iris'] !== undefined && gesture['iris'] === face.id) person.gestures.push(gesture);
+      else if (gesture['body'] !== undefined && gesture['body'] === person.body?.id) person.gestures.push(gesture);
+      else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands.left?.id) person.gestures.push(gesture);
+      else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands.right?.id) person.gestures.push(gesture);
     }

     // create new overarching box from all boxes belonging to person
@@ -50,10 +50,10 @@ export function join(faces: Array<FaceResult>, bodies: Array<BodyResult>, hands:
         y.push(box[1], box[1] + box[3]);
       }
     };
-    extractXY(person.face?.box);
+    extractXY(person.face.box);
     extractXY(person.body?.box);
-    extractXY(person.hands?.left?.box);
-    extractXY(person.hands?.right?.box);
+    extractXY(person.hands.left?.box);
+    extractXY(person.hands.right?.box);
     const minX = Math.min(...x);
     const minY = Math.min(...y);
     person.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY]; // create new overarching box
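Note: the hunk above computes one overarching person box by spanning the corners of every contributing box. A self-contained sketch with a worked example:

type Box = [number, number, number, number]; // x, y, width, height

// span a single box over all contributing boxes
function unionBoxes(boxes: Box[]): Box {
  const x: number[] = [];
  const y: number[] = [];
  for (const box of boxes) {
    x.push(box[0], box[0] + box[2]);
    y.push(box[1], box[1] + box[3]);
  }
  const minX = Math.min(...x);
  const minY = Math.min(...y);
  return [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY];
}

console.log(unionBoxes([[0, 0, 10, 10], [5, 5, 20, 20]])); // [0, 0, 25, 25]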