convert to typescript

pull/293/head
Vladimir Mandic 2021-02-08 11:39:09 -05:00
parent ab620c85fa
commit bfe688251b
22 changed files with 160 additions and 143 deletions

View File

@ -46,7 +46,7 @@ Compatible with *Browser*, *WebWorker* and *NodeJS* execution on both Windows an
<br>
*This is a pre-release project, see [issues](https://github.com/vladmandic/human/issues) for list of known limitations and planned enhancements*
*See [issues](https://github.com/vladmandic/human/issues?q=) and [discussions](https://github.com/vladmandic/human/discussions) for list of known limitations and planned enhancements*
*Suggestions are welcome!*

View File

@ -56,14 +56,19 @@ async function detect(input) {
}
async function test() {
// test with embedded face image
log.state('Processing embedded warmup image: face');
myConfig.warmup = 'face';
const resultFace = await human.warmup(myConfig);
log.data(resultFace);
log.data('Face: ', resultFace.face);
// test with embedded full body image
log.state('Processing embedded warmup image: full');
myConfig.warmup = 'full';
const resultFull = await human.warmup(myConfig);
log.data(resultFull);
log.data('Body:', resultFull.body);
log.data('Hand:', resultFull.hand);
log.data('Gesture:', resultFull.gesture);
}
async function main() {

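The demo now logs each warmup result component separately. Driven standalone, the warmup flow looks roughly like this; a minimal sketch, where the import path and config values are assumptions rather than part of this commit:

```ts
// minimal warmup sketch; import path and config values are assumptions
import Human from '@vladmandic/human';

async function warmupAll() {
  const human = new Human();
  // embedded face sample exercises face, age, gender, and emotion models
  const face = await human.warmup({ warmup: 'face' });
  console.log('face:', face.face);
  // embedded full-body sample additionally exercises body and hand models
  const full = await human.warmup({ warmup: 'full' });
  console.log('body:', full.body, 'hand:', full.hand, 'gesture:', full.gesture);
}

warmupAll();
```
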
View File

@ -35,7 +35,7 @@
"@vladmandic/pilogger": "^0.2.14",
"chokidar": "^3.5.1",
"dayjs": "^1.10.4",
"esbuild": "^0.8.42",
"esbuild": "^0.8.43",
"eslint": "^7.19.0",
"eslint-config-airbnb-base": "^14.2.1",
"eslint-plugin-import": "^2.22.1",
@ -44,7 +44,9 @@
"eslint-plugin-promise": "^4.2.1",
"rimraf": "^3.0.2",
"seedrandom": "^3.0.5",
"simple-git": "^2.32.0"
"simple-git": "^2.34.2",
"tslib": "^2.1.0",
"typescript": "^4.1.3"
},
"scripts": {
"start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",

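The new `typescript` and `tslib` dev dependencies imply a compiler configuration. The following `tsconfig.json` is a hypothetical minimal sketch consistent with the sources in this commit (e.g. `resolveJsonModule` for the `package.json` import in the main module), not the actual file:

```jsonc
{
  "compilerOptions": {
    "module": "esnext",
    "target": "es2018",
    "allowJs": true,            // remaining .js sources still compile
    "importHelpers": true,      // emit tslib helpers instead of inlining them
    "resolveJsonModule": true,  // needed for `import * as app from '../package.json'`
    "esModuleInterop": true,
    "strict": false             // fields stay typed `any` during the conversion
  }
}
```
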
View File

@ -1,12 +1,12 @@
import { log } from '../log.js';
import { log } from '../log';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile.js';
const models = {};
const models = { age: null };
let last = { age: 0 };
let skipped = Number.MAX_SAFE_INTEGER;
async function load(config) {
export async function load(config) {
if (!models.age) {
models.age = await tf.loadGraphModel(config.face.age.modelPath);
log(`load model: ${config.face.age.modelPath.match(/\/(.*)\./)[1]}`);
@ -14,7 +14,7 @@ async function load(config) {
return models.age;
}
async function predict(image, config) {
export async function predict(image, config) {
if (!models.age) return null;
if ((skipped < config.face.age.skipFrames) && config.videoOptimized && last.age && (last.age > 0)) {
skipped++;
@ -38,29 +38,27 @@ async function predict(image, config) {
tf.dispose(resize);
let ageT;
const obj = {};
const obj = { age: undefined };
if (!config.profile) {
if (config.face.age.enabled) ageT = await models.age.predict(enhance);
} else {
const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};
ageT = profileAge.result.clone();
profileAge.result.dispose();
// @ts-ignore
profile.run('age', profileAge);
if (models.age) {
if (!config.profile) {
if (config.face.age.enabled) ageT = await models.age.predict(enhance);
} else {
const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};
ageT = profileAge.result.clone();
profileAge.result.dispose();
profile.run('age', profileAge);
}
enhance.dispose();
if (ageT) {
const data = ageT.dataSync();
obj.age = Math.trunc(10 * data[0]) / 10;
}
ageT.dispose();
last = obj;
}
enhance.dispose();
if (ageT) {
const data = ageT.dataSync();
obj.age = Math.trunc(10 * data[0]) / 10;
}
ageT.dispose();
last = obj;
resolve(obj);
});
}
exports.predict = predict;
exports.load = load;

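With `load` and `predict` promoted to named exports, the module is consumed directly instead of via `exports.*`. A usage sketch, assuming a config object with only the fields the module reads (`enabled`, `modelPath`, `skipFrames`; the path shown is illustrative):

```ts
import * as age from './age/age';

// illustrative config: only the fields age.ts actually reads
const config: any = {
  videoOptimized: false,
  profile: false,
  face: { age: { enabled: true, modelPath: 'file://models/age.json', skipFrames: 15 } },
};

async function estimateAge(faceImage: any): Promise<number | undefined> {
  await age.load(config);                           // loads and caches the graph model
  const res = await age.predict(faceImage, config); // { age } rounded to one decimal, or the cached last result
  return res?.age;
}
```
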
View File

@ -1,4 +1,4 @@
const MESH_ANNOTATIONS = {
export const MESH_ANNOTATIONS = {
silhouette: [
10, 338, 297, 332, 284, 251, 389, 356, 454, 323, 361, 288,
397, 365, 379, 378, 400, 377, 152, 148, 176, 149, 150, 136,
@ -37,7 +37,7 @@ const MESH_ANNOTATIONS = {
leftCheek: [425],
};
const MESH_TO_IRIS_INDICES_MAP = [ // A mapping from facemesh model keypoints to iris model keypoints.
export const MESH_TO_IRIS_INDICES_MAP = [ // A mapping from facemesh model keypoints to iris model keypoints.
{ key: 'EyeUpper0', indices: [9, 10, 11, 12, 13, 14, 15] },
{ key: 'EyeUpper1', indices: [25, 26, 27, 28, 29, 30, 31] },
{ key: 'EyeUpper2', indices: [41, 42, 43, 44, 45, 46, 47] },
@ -49,7 +49,7 @@ const MESH_TO_IRIS_INDICES_MAP = [ // A mapping from facemesh model keypoints to
// { key: 'EyebrowLower', indices: [48, 49, 50, 51, 52, 53] },
];
const UV468 = [
export const UV468 = [
[0.499976992607117, 0.652534008026123],
[0.500025987625122, 0.547487020492554],
[0.499974012374878, 0.602371990680695],
@ -520,7 +520,7 @@ const UV468 = [
[0.723330020904541, 0.363372981548309],
];
const TRI468 = [
export const TRI468 = [
127, 34, 139, 11, 0, 37, 232, 231, 120, 72, 37, 39, 128, 121, 47, 232, 121, 128, 104, 69, 67, 175, 171, 148, 157, 154, 155, 118, 50, 101, 73, 39, 40, 9,
151, 108, 48, 115, 131, 194, 204, 211, 74, 40, 185, 80, 42, 183, 40, 92, 186, 230, 229, 118, 202, 212, 214, 83, 18, 17, 76, 61, 146, 160, 29, 30, 56,
157, 173, 106, 204, 194, 135, 214, 192, 203, 165, 98, 21, 71, 68, 51, 45, 4, 144, 24, 23, 77, 146, 91, 205, 50, 187, 201, 200, 18, 91, 106, 182, 90, 91,
@ -606,7 +606,7 @@ const TRI468 = [
259, 443, 259, 260, 444, 260, 467, 445, 309, 459, 250, 305, 289, 290, 305, 290, 460, 401, 376, 435, 309, 250, 392, 376, 411, 433, 453, 341, 464, 357,
453, 465, 343, 357, 412, 437, 343, 399, 344, 360, 440, 420, 437, 456, 360, 420, 363, 361, 401, 288, 265, 372, 353, 390, 339, 249, 339, 448, 255];
const TRI68 = [0, 1, 36, 0, 36, 17, 1, 2, 41, 1, 41, 36, 2, 3, 31, 2, 31, 41, 3, 4, 48, 3, 48, 31, 4, 5, 48, 5, 6, 48, 6, 7, 59, 6, 59, 48, 7, 8, 58, 7, 58, 59,
export const TRI68 = [0, 1, 36, 0, 36, 17, 1, 2, 41, 1, 41, 36, 2, 3, 31, 2, 31, 41, 3, 4, 48, 3, 48, 31, 4, 5, 48, 5, 6, 48, 6, 7, 59, 6, 59, 48, 7, 8, 58, 7, 58, 59,
8, 9, 56, 8, 56, 57, 8, 57, 58, 9, 10, 55, 9, 55, 56, 10, 11, 54, 10, 54, 55, 11, 12, 54, 12, 13, 54, 13, 14, 35, 13, 35, 54, 14, 15, 46, 14, 46, 35, 15, 16,
45, 15, 45, 46, 16, 26, 45, 17, 36, 18, 18, 37, 19, 18, 36, 37, 19, 38, 20, 19, 37, 38, 20, 39, 21, 20, 38, 39, 21, 39, 27, 22, 42, 23, 22, 27, 42, 23, 43, 24,
23, 42, 43, 24, 44, 25, 24, 43, 44, 25, 45, 26, 25, 44, 45, 27, 39, 28, 27, 28, 42, 28, 39, 29, 28, 29, 42, 29, 31, 30, 29, 30, 35, 29, 40, 31, 29, 35, 47, 29,
@ -614,7 +614,8 @@ const TRI68 = [0, 1, 36, 0, 36, 17, 1, 2, 41, 1, 41, 36, 2, 3, 31, 2, 31, 41, 3,
35, 34, 51, 52, 35, 46, 47, 35, 52, 53, 35, 53, 54, 36, 41, 37, 37, 40, 38, 37, 41, 40, 38, 40, 39, 42, 47, 43, 43, 47, 44, 44, 46, 45, 44, 47, 46, 48, 60, 49,
48, 59, 60, 49, 61, 50, 49, 60, 61, 50, 62, 51, 50, 61, 62, 51, 62, 52, 52, 63, 53, 52, 62, 63, 53, 64, 54, 53, 63, 64, 54, 64, 55, 55, 65, 56, 55, 64, 65, 56,
66, 57, 56, 65, 66, 57, 66, 58, 58, 67, 59, 58, 66, 67, 59, 67, 60, 60, 67, 61, 61, 66, 62, 61, 67, 66, 62, 66, 63, 63, 65, 64, 63, 66, 65, 21, 27, 22];
const TRI33 = [
export const TRI33 = [
/* eyes */ 0, 8, 7, 7, 8, 1, 2, 10, 9, 9, 10, 3,
/* brows */ 17, 0, 18, 18, 0, 7, 18, 7, 19, 19, 7, 1, 19, 1, 11, 19, 11, 20, 21, 3, 22, 21, 9, 3, 20, 9, 21, 20, 2, 9, 20, 11, 2,
/* 4head */ 23, 17, 18, 25, 21, 22, 24, 19, 20, 24, 18, 19, 24, 20, 21, 24, 23, 18, 24, 21, 25,
@ -624,9 +625,10 @@ const TRI33 = [
/* chin */ 5, 32, 16, 16, 32, 6, 5, 30, 32, 6, 32, 31,
/* cont */ 26, 30, 5, 27, 6, 31, 0, 28, 26, 3, 27, 29, 17, 28, 0, 3, 29, 22, 23, 28, 17, 22, 29, 25, 28, 30, 26, 27, 31, 29,
];
const TRI7 = [0, 4, 1, 2, 4, 3, 4, 5, 6];
const VTX68 = [
export const TRI7 = [0, 4, 1, 2, 4, 3, 4, 5, 6];
export const VTX68 = [
/* cont */ 127, 234, 132, 58, 172, 150, 149, 148, 152, 377, 378, 379, 397, 288, 361, 454, 356,
/* brows */ 70, 63, 105, 66, 107, 336, 296, 334, 293, 300,
/* nose */ 168, 6, 195, 4, 98, 97, 2, 326, 327,
@ -634,18 +636,13 @@ const VTX68 = [
/* lip */ 57, 40, 37, 0, 267, 270, 287, 321, 314, 17, 84, 91,
/* mouth */ 78, 81, 13, 311, 308, 402, 14, 178,
];
const VTX33 = [33, 133, 362, 263, 1, 62, 308, 159, 145, 386, 374, 6, 102, 331, 2, 13, 14, 70, 105, 107, 336, 334, 300, 54, 10, 284, 50, 280, 234, 454, 58, 288, 152];
const VTX7 = [33, 133, 362, 263, 1, 78, 308];
exports.MESH_ANNOTATIONS = MESH_ANNOTATIONS;
exports.MESH_TO_IRIS_INDICES_MAP = MESH_TO_IRIS_INDICES_MAP;
export const VTX33 = [33, 133, 362, 263, 1, 62, 308, 159, 145, 386, 374, 6, 102, 331, 2, 13, 14, 70, 105, 107, 336, 334, 300, 54, 10, 284, 50, 280, 234, 454, 58, 288, 152];
exports.TRI468 = TRI468;
exports.TRI68 = TRI68;
exports.TRI33 = TRI33;
exports.TRI7 = TRI7;
export const VTX7 = [33, 133, 362, 263, 1, 78, 308];
exports.UV468 = UV468;
exports.UV68 = VTX68.map((x) => UV468[x]);
exports.UV33 = VTX33.map((x) => UV468[x]);
exports.UV7 = VTX7.map((x) => UV468[x]);
export const UV68 = VTX68.map((x) => UV468[x]);
export const UV33 = VTX33.map((x) => UV468[x]);
export const UV7 = VTX7.map((x) => UV468[x]);

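The smaller UV sets are no longer assigned to `exports` manually; each is simply the 468-point UV table indexed by a vertex list, a pattern that extends to any subset:

```ts
import { UV468, VTX68 } from './blazeface/coords';

// derive UV coordinates for any mesh-vertex subset by indexing the full 468-point table
const uvFor = (vertices: number[]) => vertices.map((v) => UV468[v]);

const uv68 = uvFor(VTX68); // matches the exported UV68
console.log(uv68[0]);      // UV of mesh vertex 127, the first face-contour point
```
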
View File

@ -1,9 +1,9 @@
import { log } from '../log.js';
import { log } from '../log';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile.js';
const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral'];
const models = {};
const models = { emotion: null };
let last = [];
let skipped = Number.MAX_SAFE_INTEGER;
@ -11,7 +11,7 @@ let skipped = Number.MAX_SAFE_INTEGER;
const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale
const scale = 1; // score multiplication factor
async function load(config) {
export async function load(config) {
if (!models.emotion) {
models.emotion = await tf.loadGraphModel(config.face.emotion.modelPath);
log(`load model: ${config.face.emotion.modelPath.match(/\/(.*)\./)[1]}`);
@ -19,7 +19,7 @@ async function load(config) {
return models.emotion;
}
async function predict(image, config) {
export async function predict(image, config) {
if (!models.emotion) return null;
if ((skipped < config.face.emotion.skipFrames) && config.videoOptimized && (last.length > 0)) {
skipped++;
@ -77,6 +77,3 @@ async function predict(image, config) {
resolve(obj);
});
}
exports.predict = predict;
exports.load = load;

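The `rgb` weights above are the standard ITU-R BT.601 luma coefficients; as plain arithmetic, the grayscale conversion applied to the model input is:

```ts
// ITU-R BT.601 luma: Y = 0.2989·R + 0.5870·G + 0.1140·B
const rgb = [0.2989, 0.5870, 0.1140];
const toGray = (r: number, g: number, b: number) => rgb[0] * r + rgb[1] * g + rgb[2] * b;

console.log(toGray(255, 0, 0)); // ≈ 76.2  — red contributes ~30% of brightness
console.log(toGray(0, 255, 0)); // ≈ 149.7 — green dominates perceived brightness
```
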
View File

@ -1,8 +1,8 @@
import { log } from '../log.js';
import { log } from '../log';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile.js';
const models = {};
const models = { gender: null };
let last = { gender: '' };
let skipped = Number.MAX_SAFE_INTEGER;
let alternative = false;
@ -10,7 +10,7 @@ let alternative = false;
// tuning values
const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale
async function load(config) {
export async function load(config) {
if (!models.gender) {
models.gender = await tf.loadGraphModel(config.face.gender.modelPath);
alternative = models.gender.inputs[0].shape[3] === 1;
@ -19,7 +19,7 @@ async function load(config) {
return models.gender;
}
async function predict(image, config) {
export async function predict(image, config) {
if (!models.gender) return null;
if ((skipped < config.face.gender.skipFrames) && config.videoOptimized && last.gender !== '') {
skipped++;
@ -45,7 +45,7 @@ async function predict(image, config) {
tf.dispose(resize);
let genderT;
const obj = {};
const obj = { gender: undefined, confidence: undefined };
if (!config.profile) {
if (config.face.gender.enabled) genderT = await models.gender.predict(enhance);
@ -53,7 +53,6 @@ async function predict(image, config) {
const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};
genderT = profileGender.result.clone();
profileGender.result.dispose();
// @ts-ignore
profile.run('gender', profileGender);
}
enhance.dispose();
@ -82,6 +81,3 @@ async function predict(image, config) {
resolve(obj);
});
}
exports.predict = predict;
exports.load = load;

View File

@ -1,4 +1,4 @@
exports.body = (res) => {
export const body = (res) => {
if (!res) return [];
const gestures = [];
for (let i = 0; i < res.length; i++) {
@ -18,7 +18,7 @@ exports.body = (res) => {
return gestures;
};
exports.face = (res) => {
export const face = (res) => {
if (!res) return [];
const gestures = [];
for (let i = 0; i < res.length; i++) {
@ -39,7 +39,7 @@ exports.face = (res) => {
return gestures;
};
exports.iris = (res) => {
export const iris = (res) => {
if (!res) return [];
const gestures = [];
for (let i = 0; i < res.length; i++) {
@ -58,7 +58,7 @@ exports.iris = (res) => {
return gestures;
};
exports.hand = (res) => {
export const hand = (res) => {
if (!res) return [];
const gestures = [];
for (let i = 0; i < res.length; i++) {

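Each analyzer takes its detection result array and returns the matched gestures. Aggregated usage might look like the sketch below; the result shape is an assumption modeled on the loops above, and feeding face results to `iris` mirrors how the gestures are composed from face annotations:

```ts
import * as gesture from './gesture/gesture';

// `result` shape is an assumption modeled on Human's detection output
function collectGestures(result: any): any[] {
  return [
    ...gesture.face(result.face),
    ...gesture.iris(result.face), // iris gestures are read from face annotations (assumption)
    ...gesture.body(result.body),
    ...gesture.hand(result.hand),
  ];
}
```
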
View File

@ -1,4 +1,4 @@
exports.anchors = [
export const anchors = [
{
w: 1,
h: 1,

View File

@ -1,18 +1,20 @@
import * as tf from '../../dist/tfjs.esm.js';
function getBoxSize(box) {
export function getBoxSize(box) {
return [
Math.abs(box.endPoint[0] - box.startPoint[0]),
Math.abs(box.endPoint[1] - box.startPoint[1]),
];
}
function getBoxCenter(box) {
export function getBoxCenter(box) {
return [
box.startPoint[0] + (box.endPoint[0] - box.startPoint[0]) / 2,
box.startPoint[1] + (box.endPoint[1] - box.startPoint[1]) / 2,
];
}
function cutBoxFromImageAndResize(box, image, cropSize) {
export function cutBoxFromImageAndResize(box, image, cropSize) {
const h = image.shape[1];
const w = image.shape[2];
const boxes = [[
@ -23,7 +25,8 @@ function cutBoxFromImageAndResize(box, image, cropSize) {
]];
return tf.image.cropAndResize(image, boxes, [0], cropSize);
}
function scaleBoxCoordinates(box, factor) {
export function scaleBoxCoordinates(box, factor) {
const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];
const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];
const palmLandmarks = box.palmLandmarks.map((coord) => {
@ -32,7 +35,8 @@ function scaleBoxCoordinates(box, factor) {
});
return { startPoint, endPoint, palmLandmarks, confidence: box.confidence };
}
function enlargeBox(box, factor = 1.5) {
export function enlargeBox(box, factor = 1.5) {
const center = getBoxCenter(box);
const size = getBoxSize(box);
const newHalfSize = [factor * size[0] / 2, factor * size[1] / 2];
@ -40,7 +44,8 @@ function enlargeBox(box, factor = 1.5) {
const endPoint = [center[0] + newHalfSize[0], center[1] + newHalfSize[1]];
return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };
}
function squarifyBox(box) {
export function squarifyBox(box) {
const centers = getBoxCenter(box);
const size = getBoxSize(box);
const maxEdge = Math.max(...size);
@ -49,7 +54,8 @@ function squarifyBox(box) {
const endPoint = [centers[0] + halfSize, centers[1] + halfSize];
return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };
}
function shiftBox(box, shiftFactor) {
export function shiftBox(box, shiftFactor) {
const boxSize = [
box.endPoint[0] - box.startPoint[0],
box.endPoint[1] - box.startPoint[1],
@ -59,12 +65,3 @@ function shiftBox(box, shiftFactor) {
const endPoint = [box.endPoint[0] + shiftVector[0], box.endPoint[1] + shiftVector[1]];
return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };
}
export {
cutBoxFromImageAndResize,
enlargeBox,
getBoxCenter,
getBoxSize,
scaleBoxCoordinates,
shiftBox,
squarifyBox,
};

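The box helpers compose: a detected palm box is typically enlarged, then squarified, before `cutBoxFromImageAndResize` crops it. A worked example with illustrative coordinates:

```ts
import { getBoxCenter, getBoxSize, enlargeBox, squarifyBox } from './handpose/box';

const box = { startPoint: [100, 50], endPoint: [200, 250], palmLandmarks: [] };

const center = getBoxCenter(box); // [150, 150]
const size = getBoxSize(box);     // [100, 200]

// grow 1.5x around the center: half-size becomes [75, 150]
const bigger = enlargeBox(box, 1.5); // { startPoint: [75, 0], endPoint: [225, 300], ... }

// pad the short edge out to the long one: a 200x200 square
const square = squarifyBox(box);     // { startPoint: [50, 50], endPoint: [250, 250], ... }
```
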
View File

@ -1,7 +1,13 @@
import * as tf from '../../dist/tfjs.esm.js';
import * as box from './box';
class HandDetector {
export class HandDetector {
model: any;
anchors: any;
anchorsTensor: any;
inputSizeTensor: any;
doubleInputSizeTensor: any;
constructor(model, inputSize, anchorsAnnotated) {
this.model = model;
this.anchors = anchorsAnnotated.map((anchor) => [anchor.x_center, anchor.y_center]);
@ -78,4 +84,3 @@ class HandDetector {
return hands;
}
}
exports.HandDetector = HandDetector;

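The new `model: any` style declarations are the mechanical core of the conversion: TypeScript requires instance properties to be declared on the class before the constructor assigns them, whereas plain JavaScript creates them implicitly. In miniature (the class here is hypothetical):

```ts
export class Example {
  model: any;        // typed `any` during migration; can be tightened later
  inputSize: number;

  constructor(model: any, inputSize: number) {
    this.model = model;         // without the declarations above, tsc reports TS2339
    this.inputSize = inputSize;
  }
}
```
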
View File

@ -2,7 +2,7 @@ import * as tf from '../../dist/tfjs.esm.js';
import * as box from './box';
import * as util from './util';
// eslint-disable-next-line no-unused-vars
import { log } from '../log.js';
import { log } from '../log';
// const PALM_BOX_SHIFT_VECTOR = [0, -0.4];
const PALM_BOX_ENLARGE_FACTOR = 5; // default 3
@ -12,7 +12,14 @@ const PALM_LANDMARK_IDS = [0, 5, 9, 13, 17, 1, 2];
const PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;
const PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;
class HandPipeline {
export class HandPipeline {
handDetector: any;
landmarkDetector: any;
inputSize: number;
storedBoxes: any;
skipped: number;
detectedHands: number;
constructor(handDetector, landmarkDetector, inputSize) {
this.handDetector = handDetector;
this.landmarkDetector = landmarkDetector;
@ -154,5 +161,3 @@ class HandPipeline {
return { startPoint, endPoint };
}
}
exports.HandPipeline = HandPipeline;

View File

@ -1,6 +1,6 @@
// https://storage.googleapis.com/tfjs-models/demos/handpose/index.html
import { log } from '../log.js';
import { log } from '../log';
import * as tf from '../../dist/tfjs.esm.js';
import * as handdetector from './handdetector';
import * as handpipeline from './handpipeline';
@ -15,7 +15,9 @@ const MESH_ANNOTATIONS = {
palmBase: [0],
};
class HandPose {
export class HandPose {
handPipeline: any;
constructor(handPipeline) {
this.handPipeline = handPipeline;
}
@ -51,20 +53,16 @@ class HandPose {
return hands;
}
}
exports.HandPose = HandPose;
async function load(config) {
export async function load(config) {
const [handDetectorModel, handPoseModel] = await Promise.all([
config.hand.enabled ? tf.loadGraphModel(config.hand.detector.modelPath, { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) : null,
config.hand.landmarks ? tf.loadGraphModel(config.hand.skeleton.modelPath, { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) : null,
]);
// @ts-ignore
const handDetector = new handdetector.HandDetector(handDetectorModel, config.hand.inputSize, anchors.anchors);
// @ts-ignore
const handPipeline = new handpipeline.HandPipeline(handDetector, handPoseModel, config.hand.inputSize);
const handPose = new HandPose(handPipeline);
if (config.hand.enabled) log(`load model: ${config.hand.detector.modelPath.match(/\/(.*)\./)[1]}`);
if (config.hand.landmarks) log(`load model: ${config.hand.skeleton.modelPath.match(/\/(.*)\./)[1]}`);
return handPose;
}
exports.load = load;

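`load` above fetches the detector and skeleton models concurrently, with disabled stages short-circuiting to `null`. The pattern reduced to its essentials (names here are illustrative):

```ts
// illustrative reduction of the conditional Promise.all loading pattern
type Loader = (path: string) => Promise<unknown>;

async function loadPair(load: Loader, detectorPath: string | null, skeletonPath: string | null) {
  const [detector, skeleton] = await Promise.all([
    detectorPath ? load(detectorPath) : null, // resolves immediately to null when a stage is disabled
    skeletonPath ? load(skeletonPath) : null,
  ]);
  return { detector, skeleton };
}
```
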
View File

@ -1,25 +1,26 @@
import { log } from './log.js';
import { log } from './log';
import * as tf from '../dist/tfjs.esm.js';
import * as backend from './tfjs/backend.js';
import * as facemesh from './blazeface/facemesh.js';
import * as faceboxes from './faceboxes/faceboxes.js';
import * as age from './age/age.js';
import * as gender from './gender/gender.js';
import * as emotion from './emotion/emotion.js';
import * as embedding from './embedding/embedding.js';
import * as posenet from './posenet/posenet.js';
import * as handpose from './handpose/handpose.js';
import * as gesture from './gesture/gesture.js';
import * as image from './image.js';
import * as profile from './profile.js';
import * as config from '../config.js';
import * as sample from './sample.js';
import * as backend from './tfjs/backend';
import * as facemesh from './blazeface/facemesh';
import * as faceboxes from './faceboxes/faceboxes';
import * as age from './age/age';
import * as gender from './gender/gender';
import * as emotion from './emotion/emotion';
import * as embedding from './embedding/embedding';
import * as posenet from './posenet/posenet';
import * as handpose from './handpose/handpose';
import * as gesture from './gesture/gesture';
import * as image from './image';
import * as profile from './profile';
import * as config from '../config';
import * as sample from './sample';
import * as app from '../package.json';
import { NodeFileSystem } from '@tensorflow/tfjs-node/dist/io/file_system';
// helper function: gets elapsed time on both browser and nodejs
const now = () => {
if (typeof performance !== 'undefined') return performance.now();
return parseInt(Number(process.hrtime.bigint()) / 1000 / 1000);
return parseInt((Number(process.hrtime.bigint()) / 1000 / 1000).toString());
};
// helper function: perform deep merge of multiple objects so it allows full inheritance with overrides
@ -42,6 +43,25 @@ function mergeDeep(...objects) {
}
class Human {
tf: any;
version: string;
config: any;
fx: any;
state: string;
numTensors: number;
analyzeMemoryLeaks: boolean;
checkSanity: boolean;
firstRun: boolean;
perf: any;
models: any;
// models
facemesh: any;
age: any;
gender: any;
emotion: any;
body: any;
hand: any;
constructor(userConfig = {}) {
this.tf = tf;
this.version = app.version;
@ -108,7 +128,7 @@ class Human {
}
// preload models, not explicitly required as it's done automatically on first use
async load(userConfig) {
async load(userConfig = null) {
this.state = 'load';
const timeStamp = now();
if (userConfig) this.config = mergeDeep(this.config, userConfig);
@ -160,7 +180,7 @@ class Human {
}
// check if backend needs initialization if it changed
async checkBackend(force) {
async checkBackend(force = false) {
if (this.config.backend && (this.config.backend !== '') && force || (tf.getBackend() !== this.config.backend)) {
const timeStamp = now();
this.state = 'backend';
@ -308,7 +328,7 @@ class Human {
emotion: emotionRes,
embedding: embeddingRes,
iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
image: face.image.toInt().squeeze(),
// image: face.image.toInt().squeeze(),
});
// don't need face anymore
@ -487,7 +507,8 @@ class Human {
async warmupNode() {
const atob = (str) => Buffer.from(str, 'base64');
const img = this.config.warmup === 'face' ? atob(sample.face) : atob(sample.body);
const data = tf.node.decodeJpeg(img);
// @ts-ignore
const data = tf.node.decodeJpeg(img); // tf.node is only defined when compiling for nodejs
const expanded = data.expandDims(0);
tf.dispose(data);
// log('Input:', expanded);

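The `mergeDeep` helper referenced above (body not shown in this hunk) is described as a deep merge allowing full inheritance with overrides. A conventional implementation of that behavior, offered as an assumption rather than the commit's exact code:

```ts
// hypothetical deep merge: later objects override earlier ones, nested plain objects merge recursively
function mergeDeep(...objects: any[]) {
  const isObject = (o: any) => o && typeof o === 'object' && !Array.isArray(o);
  return objects.reduce((prev, obj) => {
    for (const key of Object.keys(obj || {})) {
      prev[key] = (isObject(prev[key]) && isObject(obj[key])) ? mergeDeep(prev[key], obj[key]) : obj[key];
    }
    return prev;
  }, {});
}

// defaults deep-merged with user overrides, as in `new Human(userConfig)`
mergeDeep({ face: { age: { enabled: true } } }, { face: { age: { enabled: false } } });
// -> { face: { age: { enabled: false } } }
```
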
View File

@ -1,6 +1,6 @@
import { log } from './log.js';
import { log } from './log';
import * as tf from '../dist/tfjs.esm.js';
import * as fxImage from './imagefx.js';
import * as fxImage from './imagefx';
// internal temp canvases
let inCanvas = null;
@ -9,7 +9,7 @@ let outCanvas = null;
// process input image and return tensor
// input can be tensor, imagedata, htmlimageelement, htmlvideoelement
// input is resized and run through imagefx filter
function process(input, config) {
export function process(input, config) {
let tensor;
if (input instanceof tf.Tensor) {
tensor = tf.clone(input);
@ -39,7 +39,7 @@ function process(input, config) {
outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(inCanvas.width, inCanvas.height) : document.createElement('canvas');
if (outCanvas.width !== inCanvas.width) outCanvas.width = inCanvas.width;
if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;
this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.Canvas({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.GLImageFilter({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
}
if (!this.fx) return inCanvas;
this.fx.reset();
@ -106,5 +106,3 @@ function process(input, config) {
}
return { tensor, canvas: config.filter.return ? outCanvas : null };
}
exports.process = process;

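`process` accepts a tensor, `ImageData`, or an HTML image/video element and returns `{ tensor, canvas }`. A browser-side call sketch; the `filter` fields are assumptions beyond `filter.return`, which appears above:

```ts
import * as image from './image';

const config: any = { filter: { enabled: true, return: true } }; // illustrative filter config

const video = document.querySelector('video') as HTMLVideoElement;
const { tensor, canvas } = image.process(video, config);
// `tensor` is ready for model input; `canvas` holds the filtered frame since filter.return is set
tensor.dispose();
```
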
View File

@ -5,7 +5,7 @@ WebGLImageFilter - MIT Licensed
<https://github.com/phoboslab/WebGLImageFilter>
*/
const WebGLProgram = function (gl, vertexSource, fragmentSource) {
const GLProgram = function (gl, vertexSource, fragmentSource) {
const _collect = function (source, prefix, collection) {
const r = new RegExp('\\b' + prefix + ' \\w+ (\\w+)', 'ig');
source.replace(r, (match, name) => {
@ -58,7 +58,7 @@ const WebGLProgram = function (gl, vertexSource, fragmentSource) {
}
};
const WebGLImageFilter = function (params) {
const GLImageFilter = function (params) {
if (!params) params = { };
let _drawCount = 0;
let _sourceTexture = null;
@ -180,7 +180,7 @@ const WebGLImageFilter = function (params) {
return { fbo, texture };
};
const _draw = function (flags) {
const _draw = function (flags = null) {
let source = null;
let target = null;
let flipY = false;
@ -225,7 +225,7 @@ const WebGLImageFilter = function (params) {
}
// Compile shaders
_currentProgram = new WebGLProgram(gl, SHADER.VERTEX_IDENTITY, fragmentSource);
_currentProgram = new GLProgram(gl, SHADER.VERTEX_IDENTITY, fragmentSource);
const floatSize = Float32Array.BYTES_PER_ELEMENT;
const vertSize = 4 * floatSize;
@ -606,4 +606,4 @@ const WebGLImageFilter = function (params) {
].join('\n');
};
exports.Canvas = WebGLImageFilter;
exports.GLImageFilter = GLImageFilter;

View File

@ -1,26 +1,24 @@
import { log } from './log.js';
import { log } from './log';
const profileData = {};
export const data = {};
function profile(name, data) {
if (!data || !data.kernels) return;
export function run(name, raw) {
if (!raw || !raw.kernels) return;
const maxResults = 5;
const time = data.kernels
const time = raw.kernels
.filter((a) => a.kernelTimeMs > 0)
.reduce((a, b) => a += b.kernelTimeMs, 0);
const slowest = data.kernels
const slowest = raw.kernels
.map((a, i) => { a.id = i; return a; })
.filter((a) => a.kernelTimeMs > 0)
.sort((a, b) => b.kernelTimeMs - a.kernelTimeMs);
const largest = data.kernels
const largest = raw.kernels
.map((a, i) => { a.id = i; return a; })
.filter((a) => a.totalBytesSnapshot > 0)
.sort((a, b) => b.totalBytesSnapshot - a.totalBytesSnapshot);
if (slowest.length > maxResults) slowest.length = maxResults;
if (largest.length > maxResults) largest.length = maxResults;
const res = { newBytes: data.newBytes, newTensors: data.newTensors, peakBytes: data.peakBytes, numKernelOps: data.kernels.length, timeKernelOps: time, slowestKernelOps: slowest, largestKernelOps: largest };
profileData[name] = res;
const res = { newBytes: raw.newBytes, newTensors: raw.newTensors, peakBytes: raw.peakBytes, numKernelOps: raw.kernels.length, timeKernelOps: time, slowestKernelOps: slowest, largestKernelOps: largest };
data[name] = res;
log('Human profiler', name, res);
}
exports.run = profile;

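`run` consumes the object returned by `tf.profile`, matching the call sites in the age and gender modules above. The pattern in isolation:

```ts
import * as tf from '../dist/tfjs.esm.js';
import * as profile from './profile';

async function profiledPredict(model: any, input: any) {
  const profiled = await tf.profile(() => model.predict(input)); // captures kernels, bytes, and the result tensor
  const result = profiled.result.clone();  // keep a copy of the output tensor
  profiled.result.dispose();
  profile.run('predict', profiled);        // stores the summary in profile.data and logs it
  return result;
}
```
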
View File

@ -1,4 +1,4 @@
import { log } from '../log.js';
import { log } from '../log';
import * as tf from '../../dist/tfjs.esm.js';
export const config = {