mirror of https://github.com/vladmandic/human
updated typings
parent bfe688251b
commit 4130ddb32f
@@ -1,4 +1,3 @@
-// @ts-nocheck
 /* eslint-disable max-len */
 
 // based on: https://github.com/munrocket/gl-bench

@@ -1,5 +1,3 @@
-// @ts-nocheck
-
 let instance = 0;
 let CSScreated = false;
 

@@ -46,7 +46,7 @@
     "seedrandom": "^3.0.5",
     "simple-git": "^2.34.2",
     "tslib": "^2.1.0",
-    "typescript": "^4.1.3"
+    "typescript": "^4.2.0-dev.20210208"
   },
   "scripts": {
     "start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",

@@ -2,20 +2,20 @@ import { log } from '../log';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile.js';
 
-const models = { age: null };
+let model;
 let last = { age: 0 };
 let skipped = Number.MAX_SAFE_INTEGER;
 
 export async function load(config) {
-  if (!models.age) {
-    models.age = await tf.loadGraphModel(config.face.age.modelPath);
+  if (!model) {
+    model = await tf.loadGraphModel(config.face.age.modelPath);
     log(`load model: ${config.face.age.modelPath.match(/\/(.*)\./)[1]}`);
   }
-  return models.age;
+  return model;
 }
 
 export async function predict(image, config) {
-  if (!models.age) return null;
+  if (!model) return null;
   if ((skipped < config.face.age.skipFrames) && config.videoOptimized && last.age && (last.age > 0)) {
     skipped++;
     return last;

@@ -38,27 +38,25 @@ export async function predict(image, config) {
     tf.dispose(resize);
 
     let ageT;
-    const obj = { age: undefined };
+    const obj = { age: 0 };
 
-    if (models.age) {
-      if (!config.profile) {
-        if (config.face.age.enabled) ageT = await models.age.predict(enhance);
-      } else {
-        const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};
-        ageT = profileAge.result.clone();
-        profileAge.result.dispose();
-        profile.run('age', profileAge);
-      }
-      enhance.dispose();
-
-      if (ageT) {
-        const data = ageT.dataSync();
-        obj.age = Math.trunc(10 * data[0]) / 10;
-      }
-      ageT.dispose();
-
-      last = obj;
-    }
+    if (!config.profile) {
+      if (config.face.age.enabled) ageT = await model.predict(enhance);
+    } else {
+      const profileAge = config.face.age.enabled ? await tf.profile(() => model.predict(enhance)) : {};
+      ageT = profileAge.result.clone();
+      profileAge.result.dispose();
+      profile.run('age', profileAge);
+    }
+    enhance.dispose();
+
+    if (ageT) {
+      const data = ageT.dataSync();
+      obj.age = Math.trunc(10 * data[0]) / 10;
+    }
+    ageT.dispose();
+
+    last = obj;
     resolve(obj);
   });
 }

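Note: the age, gender, and emotion hunks all apply the same two changes: the `const models = { ... }` container collapses into a single module-level `model` handle, and result defaults become real values so they survive `strictNullChecks`. The skip-frames throttle those files share is worth seeing in isolation; below is a minimal standalone sketch of that caching and throttling pattern, where `runInference` and the config shape are hypothetical stand-ins, not the library's API:

    // Minimal sketch of the cached-model + skipFrames pattern from age/gender/emotion.
    // runInference is a hypothetical stand-in for tf.loadGraphModel/model.predict.
    declare function runInference(image: unknown): Promise<{ age: number }>;

    let model: unknown = null;              // single module-level handle (was: const models = { age: null })
    let last = { age: 0 };                  // cached result, reused while skipping
    let skipped = Number.MAX_SAFE_INTEGER;  // start high so the first frame always runs

    async function predict(image: unknown, config: { skipFrames: number, videoOptimized: boolean }) {
      if (!model) return null;              // load() must have run first
      if (skipped < config.skipFrames && config.videoOptimized && last.age > 0) {
        skipped++;                          // video frames change slowly: return the cached result
        return last;
      }
      skipped = 0;                          // run real inference and refresh the cache
      last = await runInference(image);
      return last;
    }
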
@@ -3,8 +3,8 @@ import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile.js';
 
 const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral'];
-const models = { emotion: null };
-let last = [];
+let model;
+let last: Array<{ score: number, emotion: string }> = [];
 let skipped = Number.MAX_SAFE_INTEGER;
 
 // tuning values

@@ -12,15 +12,15 @@ const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when
 const scale = 1; // score multiplication factor
 
 export async function load(config) {
-  if (!models.emotion) {
-    models.emotion = await tf.loadGraphModel(config.face.emotion.modelPath);
+  if (!model) {
+    model = await tf.loadGraphModel(config.face.emotion.modelPath);
     log(`load model: ${config.face.emotion.modelPath.match(/\/(.*)\./)[1]}`);
   }
-  return models.emotion;
+  return model;
 }
 
 export async function predict(image, config) {
-  if (!models.emotion) return null;
+  if (!model) return null;
   if ((skipped < config.face.emotion.skipFrames) && config.videoOptimized && (last.length > 0)) {
     skipped++;
     return last;

@@ -54,15 +54,15 @@ export async function predict(image, config) {
     blueNorm.dispose();
     const normalize = tf.tidy(() => grayscale.sub(0.5).mul(2));
     grayscale.dispose();
-    const obj = [];
+    const obj: Array<{ score: number, emotion: string }> = [];
     if (config.face.emotion.enabled) {
       let data;
       if (!config.profile) {
-        const emotionT = await models.emotion.predict(normalize);
+        const emotionT = await model.predict(normalize);
         data = emotionT.dataSync();
         tf.dispose(emotionT);
       } else {
-        const profileData = await tf.profile(() => models.emotion.predict(normalize));
+        const profileData = await tf.profile(() => model.predict(normalize));
         data = profileData.result.dataSync();
         profileData.result.dispose();
         profile.run('emotion', profileData);

@@ -2,7 +2,7 @@ import { log } from '../log';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile.js';
 
-const models = { gender: null };
+let model;
 let last = { gender: '' };
 let skipped = Number.MAX_SAFE_INTEGER;
 let alternative = false;

@@ -11,16 +11,16 @@ let alternative = false;
 const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale
 
 export async function load(config) {
-  if (!models.gender) {
-    models.gender = await tf.loadGraphModel(config.face.gender.modelPath);
-    alternative = models.gender.inputs[0].shape[3] === 1;
+  if (!model) {
+    model = await tf.loadGraphModel(config.face.gender.modelPath);
+    alternative = model.inputs[0].shape[3] === 1;
     log(`load model: ${config.face.gender.modelPath.match(/\/(.*)\./)[1]}`);
   }
-  return models.gender;
+  return model;
 }
 
 export async function predict(image, config) {
-  if (!models.gender) return null;
+  if (!model) return null;
   if ((skipped < config.face.gender.skipFrames) && config.videoOptimized && last.gender !== '') {
     skipped++;
     return last;

@@ -45,12 +45,12 @@ export async function predict(image, config) {
     tf.dispose(resize);
 
     let genderT;
-    const obj = { gender: undefined, confidence: undefined };
+    const obj = { gender: '', confidence: 0 };
 
     if (!config.profile) {
-      if (config.face.gender.enabled) genderT = await models.gender.predict(enhance);
+      if (config.face.gender.enabled) genderT = await model.predict(enhance);
     } else {
-      const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};
+      const profileGender = config.face.gender.enabled ? await tf.profile(() => model.predict(enhance)) : {};
       genderT = profileGender.result.clone();
       profileGender.result.dispose();
       profile.run('gender', profileGender);

@@ -1,6 +1,6 @@
 export const body = (res) => {
   if (!res) return [];
-  const gestures = [];
+  const gestures: Array<{ body: number, gesture: string }> = [];
   for (let i = 0; i < res.length; i++) {
     // raising hands
     const leftWrist = res[i].keypoints.find((a) => (a.part === 'leftWrist'));

@@ -20,7 +20,7 @@ export const body = (res) => {
 
 export const face = (res) => {
   if (!res) return [];
-  const gestures = [];
+  const gestures: Array<{ face: number, gesture: string }> = [];
   for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 0) {
       const eyeFacing = res[i].mesh[35][2] - res[i].mesh[263][2];

@@ -41,7 +41,7 @@ export const face = (res) => {
 
 export const iris = (res) => {
   if (!res) return [];
-  const gestures = [];
+  const gestures: Array<{ iris: number, gesture: string }> = [];
   for (let i = 0; i < res.length; i++) {
     if (!res[i].annotations || !res[i].annotations.leftEyeIris || !res[i].annotations.rightEyeIris) continue;
     const sizeXLeft = res[i].annotations.leftEyeIris[3][0] - res[i].annotations.leftEyeIris[1][0];

@@ -60,10 +60,11 @@ export const iris = (res) => {
 
 export const hand = (res) => {
   if (!res) return [];
-  const gestures = [];
+  const gestures: Array<{ hand: number, gesture: string }> = [];
   for (let i = 0; i < res.length; i++) {
-    const fingers = [];
+    const fingers: Array<{ name: string, position: number }> = [];
     for (const [finger, pos] of Object.entries(res[i]['annotations'])) {
+      // @ts-ignore
       if (finger !== 'palmBase') fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
     }
     if (fingers && fingers.length > 0) {

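Note: the recurring `const gestures = []` to `const gestures: Array<{...}> = []` change exists because an untyped empty literal gives the compiler no element type to check `push` calls against once the stricter settings are on, and the added `// @ts-ignore` covers `Object.entries`, whose values come back too loosely typed to index with `pos[0]`. A small illustration with assumed shapes, not the library's own types:

    // Untyped: the element type is effectively any, so pushes go unchecked.
    const loose = [];
    loose.push({ hand: 0, gesture: 'thumb forward' });    // compiles, but unverified

    // Typed: every push is checked against the declared element shape.
    const gestures: Array<{ hand: number, gesture: string }> = [];
    gestures.push({ hand: 0, gesture: 'thumb forward' }); // ok
    // gestures.push({ hand: 'x' });                      // compile error: wrong shape

    // Object.entries values are loosely typed; a cast is one @ts-ignore-free option.
    const annotations = { indexFinger: [[10, 20, 30]] };  // assumed annotation shape
    for (const [finger, pos] of Object.entries(annotations) as [string, number[][]][]) {
      if (finger !== 'palmBase') gestures.push({ hand: 0, gesture: `${finger} at ${pos[0][0]}` });
    }
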
@@ -49,7 +49,7 @@ export class HandDetector {
 
     scoresT.dispose();
     filteredT.dispose();
-    const hands = [];
+    const hands: Array<{ box: any, palmLandmarks: any, confidence: number }> = [];
    for (const index of filtered) {
       if (scores[index] >= config.hand.minConfidence) {
         const matchingBox = tf.slice(boxes, [index, 0], [1, -1]);

@@ -70,7 +70,7 @@ export class HandDetector {
     const image = tf.tidy(() => input.resizeBilinear([config.hand.inputSize, config.hand.inputSize]).div(127.5).sub(1));
     const predictions = await this.getBoxes(image, config);
     image.dispose();
-    const hands = [];
+    const hands: Array<{}> = [];
     if (!predictions || predictions.length === 0) return hands;
     for (const prediction of predictions) {
       const boxes = prediction.box.dataSync();

@@ -91,7 +91,7 @@ export class HandPipeline {
       // for (const possible of boxes) this.storedBoxes.push(possible);
       if (this.storedBoxes.length > 0) useFreshBox = true;
     }
-    const hands = [];
+    const hands: Array<{}> = [];
     // log('hand', `skipped: ${this.skipped} max: ${config.hand.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);
 
     // go through working set of boxes

@@ -124,10 +124,7 @@ export class HandPipeline {
         const result = {
           landmarks: coords,
           confidence,
-          box: {
-            topLeft: nextBoundingBox.startPoint,
-            bottomRight: nextBoundingBox.endPoint,
-          },
+          box: { topLeft: nextBoundingBox.startPoint, bottomRight: nextBoundingBox.endPoint },
         };
         hands.push(result);
       } else {

@@ -139,10 +136,7 @@ export class HandPipeline {
         const enlarged = box.enlargeBox(box.squarifyBox(currentBox), HAND_BOX_ENLARGE_FACTOR);
         const result = {
           confidence: currentBox.confidence,
-          box: {
-            topLeft: enlarged.startPoint,
-            bottomRight: enlarged.endPoint,
-          },
+          box: { topLeft: enlarged.startPoint, bottomRight: enlarged.endPoint },
         };
         hands.push(result);
       }

@@ -29,7 +29,7 @@ export class HandPose {
   async estimateHands(input, config) {
     const predictions = await this.handPipeline.estimateHands(input, config);
     if (!predictions) return [];
-    const hands = [];
+    const hands: Array<{ confidence: number, box: any, landmarks: any, annotations: any }> = [];
     for (const prediction of predictions) {
       const annotations = {};
       if (prediction.landmarks) {

@@ -43,12 +43,7 @@ export class HandPose {
         Math.min(input.shape[2], prediction.box.bottomRight[0]) - prediction.box.topLeft[0],
         Math.min(input.shape[1], prediction.box.bottomRight[1]) - prediction.box.topLeft[1],
       ] : 0;
-      hands.push({
-        confidence: prediction.confidence,
-        box,
-        landmarks: prediction.landmarks,
-        annotations,
-      });
+      hands.push({ confidence: prediction.confidence, box, landmarks: prediction.landmarks, annotations });
     }
     return hands;
   }

src/human.ts (11 changed lines)

@@ -236,7 +236,7 @@ class Human {
     let genderRes;
     let emotionRes;
     let embeddingRes;
-    const faceRes = [];
+    const faceRes: Array<{ confidence: number, box: any, mesh: any, meshRaw: any, boxRaw: any, annotations: any, age: number, gender: string, genderConfidence: number, emotion: string, embedding: any, iris: number }> = [];
     this.state = 'run:face';
     timeStamp = now();
     const faces = await this.models.face?.estimateFaces(input, this.config);

@@ -349,8 +349,8 @@ class Human {
     this.state = 'image';
     this.config = mergeDeep(this.config, userConfig);
     const process = image.process(input, this.config);
-    process.tensor.dispose();
-    return process.canvas;
+    process?.tensor?.dispose();
+    return process?.canvas;
   }
 
   // main detect function

@@ -445,6 +445,7 @@ class Human {
     let gestureRes = [];
     if (this.config.gesture.enabled) {
       timeStamp = now();
+      // @ts-ignore
       gestureRes = [...gesture.face(faceRes), ...gesture.body(poseRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
       if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);
       else if (this.perf.gesture) delete this.perf.gesture;

@@ -495,8 +496,8 @@ class Human {
       canvas.width = size;
       canvas.height = size;
       const ctx = canvas.getContext('2d');
-      ctx.drawImage(img, 0, 0);
-      const data = ctx.getImageData(0, 0, size, size);
+      ctx?.drawImage(img, 0, 0);
+      const data = ctx?.getImageData(0, 0, size, size);
       this.detect(data, this.config).then((res) => resolve(res));
     };
     if (src) img.src = src;

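Note: `canvas.getContext('2d')` is typed `CanvasRenderingContext2D | null`, so once `strictNullChecks` is on, a plain `ctx.drawImage(...)` no longer compiles; the optional-chaining form in the hunk above is the smallest fix. A sketch of the before and after, assuming browser DOM types:

    const canvas = document.createElement('canvas');
    const img = new Image();
    const ctx = canvas.getContext('2d');   // typed CanvasRenderingContext2D | null
    // ctx.drawImage(img, 0, 0);           // error under strictNullChecks: ctx may be null
    ctx?.drawImage(img, 0, 0);             // ok: silently a no-op if ctx is null
    const data = ctx?.getImageData(0, 0, canvas.width, canvas.height); // ImageData | undefined
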
@@ -1,3 +1,5 @@
+// @ts-nocheck
+
 import { log } from './log';
 import * as tf from '../dist/tfjs.esm.js';
 import * as fxImage from './imagefx';

@@ -24,12 +24,14 @@ export function register() {
   if (!tf.findBackend(config.name)) {
     log('backend registration:', config.name);
     try {
+      // @ts-ignore
       config.canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(config.width, config.height) : document.createElement('canvas');
     } catch (err) {
       log('error: cannot create canvas:', err);
       return;
     }
     try {
+      // @ts-ignore
       config.gl = config.canvas.getContext('webgl2', config.webGLattr);
     } catch (err) {
       log('error: cannot get WebGL2 context:', err);

@@ -60,6 +62,7 @@ export function register() {
     }
     try {
       tf.ENV.set('WEBGL_VERSION', 2);
+      // @ts-ignore
       tf.ENV.set('WEBGL_MAX_TEXTURE_SIZE', config.gl.getParameter(config.gl.MAX_TEXTURE_SIZE));
       tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
       tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);

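Note: the `// @ts-ignore` lines in this backend registration paper over DOM typing gaps, chiefly that `OffscreenCanvas` is absent from older lib definitions and that the `config` object's fields are untyped. One alternative, assuming a DOM lib recent enough to include `OffscreenCanvas`, would be an explicit union type instead of suppression; the commit keeps `@ts-ignore`, so this is only a sketch:

    // Sketch: typed canvas creation without @ts-ignore (assumes lib.dom with OffscreenCanvas).
    const canvas: HTMLCanvasElement | OffscreenCanvas = (typeof OffscreenCanvas !== 'undefined')
      ? new OffscreenCanvas(1024, 1024)
      : document.createElement('canvas');
    // The cast sidesteps overload resolution on the union; both canvas types accept 'webgl2'.
    const gl = (canvas as HTMLCanvasElement).getContext('webgl2', { alpha: false }) as WebGL2RenderingContext | null;
    if (!gl) throw new Error('cannot get WebGL2 context');
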
@@ -17,9 +17,9 @@
     "resolveJsonModule": true,
     "skipLibCheck": true,
     "sourceMap": false,
-    "strictNullChecks": false,
+    "strictNullChecks": true,
     "allowJs": true
   },
   "formatCodeOptions": { "indentSize": 2, "tabSize": 2 },
-  "include": ["src/*", "demo/*"],
+  "include": ["src/*", "src/***/*", "demo/*"],
 }

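Note: flipping `"strictNullChecks"` to `true` is what drives most of this commit: `null` and `undefined` stop being assignable to every type, which forces the concrete defaults (`{ age: 0 }`, `{ gender: '', confidence: 0 }`), the typed empty arrays, and the `?.` guards seen in the hunks above, while the widened `"include"` glob pulls the nested `src` subdirectories into the checked program. A two-line illustration:

    // "strictNullChecks": false -- undefined is a member of string, so this compiled:
    // const obj: { gender: string } = { gender: undefined };
    // "strictNullChecks": true -- it no longer does, hence the concrete defaults:
    const obj: { gender: string, confidence: number } = { gender: '', confidence: 0 };
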
wiki (2 changed lines)

@@ -1 +1 @@
-Subproject commit d88043901df1d671a0897275bae8d0f537b84ff5
+Subproject commit d365c4e487508181811ea7ff1a26d682ef6896b7