exception handling

pull/293/head
Vladimir Mandic 2021-04-09 10:02:40 -04:00
parent d9bc088582
commit aaec742c0a
11 changed files with 76 additions and 59 deletions

View File

@@ -1,6 +1,6 @@
 # @vladmandic/human
-Version: **1.4.0**
+Version: **1.4.1**
 Description: **Human: AI-powered 3D Face Detection, Face Description & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition**
 Author: **Vladimir Mandic <mandic00@live.com>**
@@ -9,8 +9,9 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
 ## Changelog
-### **HEAD -> main** 2021/04/08 mandic00@live.com
+### **1.4.1** 2021/04/09 mandic00@live.com
+- add modelbasepath option
 ### **1.3.5** 2021/04/06 mandic00@live.com

View File

@@ -30,9 +30,10 @@ Check out [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) fo
 ## Demos
-- [**Demo Application**](https://vladmandic.github.io/human/demo/index.html)
+- [**Main Application**](https://vladmandic.github.io/human/demo/index.html)
 - [**Face Extraction, Description, Identification and Matching**](https://vladmandic.github.io/human/demo/facematch.html)
 - [**Face Extraction and 3D Rendering**](https://vladmandic.github.io/human/demo/face3d.html)
+- [**Details on Demo Applications**](https://github.com/vladmandic/human/wiki/Demos)
 ## Project pages
@@ -47,7 +48,6 @@ Check out [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) fo
 ## Wiki pages
 - [**Home**](https://github.com/vladmandic/human/wiki)
-- [**Demos**](https://github.com/vladmandic/human/wiki/Demos)
 - [**Installation**](https://github.com/vladmandic/human/wiki/Install)
 - [**Usage & Functions**](https://github.com/vladmandic/human/wiki/Usage)
 - [**Configuration Details**](https://github.com/vladmandic/human/wiki/Configuration)
@@ -185,7 +185,7 @@ For more info, see [**Configuration Details**](https://github.com/vladmandic/human/wiki/Configuration)
 <br><hr><br>
-`Human` library is written in `TypeScript` [4.3](https://www.typescriptlang.org/docs/handbook/intro.html)
+`Human` library is written in `TypeScript` [4.2](https://www.typescriptlang.org/docs/handbook/intro.html)
 Conforming to `JavaScript` [ECMAScript version 2020](https://www.ecma-international.org/ecma-262/11.0/index.html) standard
 Build target is `JavaScript` **ECMAScript version 2018**

View File

@@ -1,5 +1,3 @@
-// @ts-nocheck
-
 let instance = 0;
 let CSScreated = false;

View File

@@ -5,6 +5,7 @@ import Menu from './helpers/menu.js';
 import GLBench from './helpers/gl-bench.js';
 const userConfig = { backend: 'webgl' }; // add any user configuration overrides
+let human;
 /*
 const userConfig = {
@@ -42,6 +43,7 @@ const ui = {
   console: true, // log messages to browser console
   maxFPSframes: 10, // keep fps history for how many frames
   modelsPreload: true, // preload human models on startup
+  modelsWarmup: true, // warmup human models on startup
   busy: false, // internal camera busy flag
   menuWidth: 0, // internal
   menuHeight: 0, // internal
@@ -89,12 +91,6 @@ function status(msg) {
   if (div) div.innerText = msg;
 }
-const human = new Human(userConfig);
-if (typeof tf !== 'undefined') {
-  log('TensorFlow external version:', tf.version);
-  human.tf = tf; // use externally loaded version of tfjs
-}
 const compare = { enabled: false, original: null };
 async function calcSimmilariry(result) {
   document.getElementById('compare-container').style.display = compare.enabled ? 'block' : 'none';
@@ -564,20 +560,57 @@ async function drawWarmup(res) {
 async function main() {
   log('demo starting ...');
+
+  // parse url search params
+  const params = new URLSearchParams(location.search);
+  log('url options:', params.toString());
+  if (params.has('worker')) {
+    ui.useWorker = JSON.parse(params.get('worker'));
+    log('overriding worker:', ui.useWorker);
+  }
+  if (params.has('backend')) {
+    userConfig.backend = JSON.parse(params.get('backend'));
+    log('overriding backend:', userConfig.backend);
+  }
+  if (params.has('preload')) {
+    ui.modelsPreload = JSON.parse(params.get('preload'));
+    log('overriding preload:', ui.modelsPreload);
+  }
+  if (params.has('warmup')) {
+    ui.modelsWarmup = JSON.parse(params.get('warmup'));
+    log('overriding warmup:', ui.modelsWarmup);
+  }
+
+  // create instance of human
+  human = new Human(userConfig);
+  if (typeof tf !== 'undefined') {
+    log('TensorFlow external version:', tf.version);
+    human.tf = tf; // use externally loaded version of tfjs
+  }
+
+  // setup main menu
   setupMenu();
   document.getElementById('log').innerText = `Human: version ${human.version}`;
+
+  // preload models
   if (ui.modelsPreload && !ui.useWorker) {
     status('loading');
     await human.load(userConfig); // this is not required, just pre-loads all models
     const loaded = Object.keys(human.models).filter((a) => human.models[a]);
     log('demo loaded models:', loaded);
   }
-  if (!ui.useWorker) {
+
+  // warmup models
+  if (ui.modelsWarmup && !ui.useWorker) {
     status('initializing');
     const res = await human.warmup(userConfig); // this is not required, just pre-warms all models for faster initial inference
     if (res && res.canvas && ui.drawWarmup) await drawWarmup(res);
   }
+
+  // setup camera
   await setupCamera();
+
+  // ready
   status('human: ready');
   document.getElementById('loader').style.display = 'none';
   document.getElementById('play').style.display = 'block';
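
Note: the new URL overrides above go through JSON.parse, so boolean flags can be passed bare while string values such as the backend name need JSON quoting. A usage sketch (parameter names taken from the code above; the exact URLs are illustrative):

  // disable the web worker and model preload for a quick startup test
  https://vladmandic.github.io/human/demo/index.html?worker=false&preload=false
  // string values must be valid JSON, so quote the backend name
  https://vladmandic.github.io/human/demo/index.html?backend="wasm"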

View File

@@ -64,8 +64,7 @@ export const hand = (res) => {
   for (let i = 0; i < res.length; i++) {
     const fingers: Array<{ name: string, position: number }> = [];
     for (const [finger, pos] of Object.entries(res[i]['annotations'])) {
-      // @ts-ignore
-      if (finger !== 'palmBase') fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
+      if (finger !== 'palmBase' && Array.isArray(pos)) fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
     }
     if (fingers && fingers.length > 0) {
       const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));
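
Note: the Array.isArray check can replace the @ts-ignore because it doubles as a runtime guard and a TypeScript type guard: inside the branch the annotation value is narrowed to an array, so indexing pos[0] no longer needs suppression. A minimal standalone sketch of the pattern (the annotation shape here is assumed for illustration):

  // Array.isArray narrows an unknown value to any[]
  const annotations: Record<string, unknown> = { palmBase: [[0, 0, 0]], indexFinger: [[1, 2, 3]] };
  for (const [finger, pos] of Object.entries(annotations)) {
    if (finger !== 'palmBase' && Array.isArray(pos)) {
      console.log(finger.toLowerCase(), pos[0]); // pos is now typed as any[], so pos[0] compiles
    }
  }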

View File

@@ -124,7 +124,7 @@ export class Human {
       faceres: null,
     };
     // export access to image processing
-    // @ts-ignore
+    // @ts-ignore // typescript cannot infer type
     this.image = (input: Input) => image.process(input, this.config);
     // export raw access to underlying models
     this.classes = {
@@ -214,9 +214,9 @@
       this.models.gender,
       this.models.emotion,
       this.models.embedding,
-      // @ts-ignore
+      // @ts-ignore // typescript cannot infer type
       this.models.handpose,
-      // @ts-ignore false warning with latest @typescript-eslint
+      // @ts-ignore // typescript cannot infer type
       this.models.posenet,
       this.models.blazepose,
       this.models.efficientpose,
@@ -422,15 +422,14 @@
     if (this.config.async) {
       [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
     }
-    process.tensor.dispose();
+    tf.dispose(process.tensor);
     if (this.config.scoped) this.tf.engine().endScope();
     this.analyze('End Scope:');
-    let gestureRes = [];
+    let gestureRes: any[] = [];
     if (this.config.gesture.enabled) {
       timeStamp = now();
-      // @ts-ignore
       gestureRes = [...gesture.face(faceRes), ...gesture.body(bodyRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
       if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);
       else if (this.perf.gesture) delete this.perf.gesture;
@@ -507,8 +506,8 @@
   #warmupNode = async () => {
     const atob = (str) => Buffer.from(str, 'base64');
     const img = this.config.warmup === 'face' ? atob(sample.face) : atob(sample.body);
-    // @ts-ignore
-    const data = tf.node.decodeJpeg(img); // tf.node is only defined when compiling for nodejs
+    // @ts-ignore // tf.node is only defined when compiling for nodejs
+    const data = tf.node?.decodeJpeg(img);
     const expanded = data.expandDims(0);
     this.tf.dispose(data);
     // log('Input:', expanded);
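
Note: the switch from process.tensor.dispose() to tf.dispose(process.tensor) is the exception-handling theme in miniature: calling .dispose() directly throws when the tensor was never assigned, while tf.dispose() walks whatever container it is given and simply frees the tensors it finds. A minimal sketch, assuming tf.dispose tolerates an empty input as described:

  import * as tf from '@tensorflow/tfjs';

  const maybeTensor: tf.Tensor | undefined = Math.random() > 0.5 ? tf.zeros([1]) : undefined;
  // maybeTensor.dispose(); // would throw when maybeTensor was never assigned
  tf.dispose(maybeTensor); // safe: no-op when there is nothing to free, releases memory otherwise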

View File

@@ -1,19 +1,17 @@
-// @ts-nocheck
-
 import * as tf from '../../dist/tfjs.esm.js';
 import * as fxImage from './imagefx';
 const maxSize = 2048;
 // internal temp canvases
-let inCanvas = null;
-let outCanvas = null;
+let inCanvas;
+let outCanvas;
 // instance of fximage
-let fx = null;
+let fx;
 // process input image and return tensor
 // input can be tensor, imagedata, htmlimageelement, htmlvideoelement
 // input is resized and run through imagefx filter
-export function process(input, config): { tensor: tf.Tensor, canvas: OffscreenCanvas | HTMLCanvasElement } {
+export function process(input, config): { tensor: typeof tf.Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement } {
   let tensor;
   if (!input) throw new Error('Human: Input is missing');
   if (
@@ -32,8 +30,8 @@ export function process(input, config): { tensor: tf.Tensor, canvas: OffscreenCanvas | HTMLCanvasElement } {
   if (input instanceof tf.Tensor) {
     tensor = tf.clone(input);
   } else {
-    const originalWidth = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));
-    const originalHeight = input.naturalHeight || input.videoHeight || input.height || (input.shape && (input.shape[2] > 0));
+    const originalWidth = input['naturalWidth'] || input['videoWidth'] || input['width'] || (input['shape'] && (input['shape'][1] > 0));
+    const originalHeight = input['naturalHeight'] || input['videoHeight'] || input['height'] || (input['shape'] && (input['shape'][2] > 0));
     let targetWidth = originalWidth;
     let targetHeight = originalHeight;
     if (targetWidth > maxSize) {
@@ -49,19 +47,19 @@ export function process(input, config): { tensor: tf.Tensor, canvas: OffscreenCanvas | HTMLCanvasElement } {
     if (config.filter.height > 0) targetHeight = config.filter.height;
     else if (config.filter.width > 0) targetHeight = originalHeight * (config.filter.width / originalWidth);
     if (!targetWidth || !targetHeight) throw new Error('Human: Input cannot determine dimension');
-    if (!inCanvas || (inCanvas.width !== targetWidth) || (inCanvas.height !== targetHeight)) {
+    if (!inCanvas || (inCanvas?.width !== targetWidth) || (inCanvas?.height !== targetHeight)) {
       inCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');
-      if (inCanvas.width !== targetWidth) inCanvas.width = targetWidth;
-      if (inCanvas.height !== targetHeight) inCanvas.height = targetHeight;
+      if (inCanvas?.width !== targetWidth) inCanvas.width = targetWidth;
+      if (inCanvas?.height !== targetHeight) inCanvas.height = targetHeight;
     }
     const ctx = inCanvas.getContext('2d');
     if (input instanceof ImageData) ctx.putImageData(input, 0, 0);
-    else ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
+    else ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
     if (config.filter.enabled) {
-      if (!fx || !outCanvas || (inCanvas.width !== outCanvas.width) || (inCanvas.height !== outCanvas.height)) {
-        outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(inCanvas.width, inCanvas.height) : document.createElement('canvas');
-        if (outCanvas.width !== inCanvas.width) outCanvas.width = inCanvas.width;
-        if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;
+      if (!fx || !outCanvas || (inCanvas.width !== outCanvas.width) || (inCanvas?.height !== outCanvas?.height)) {
+        outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(inCanvas?.width, inCanvas?.height) : document.createElement('canvas');
+        if (outCanvas?.width !== inCanvas?.width) outCanvas.width = inCanvas?.width;
+        if (outCanvas?.height !== inCanvas?.height) outCanvas.height = inCanvas?.height;
         // log('created FX filter');
         fx = tf.ENV.flags.IS_BROWSER ? new fxImage.GLImageFilter({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
       }
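
Note: the ?. reads above reflect that inCanvas and outCanvas now start out undefined instead of null; reads tolerate the missing canvas, while assignments keep plain access because optional chaining is not allowed on the left-hand side of an assignment. A reduced sketch of the same pattern, with hypothetical names:

  let demoCanvas: HTMLCanvasElement | undefined; // starts undefined, like inCanvas above
  const needsResize = (w: number, h: number) => !demoCanvas || demoCanvas?.width !== w || demoCanvas?.height !== h;
  if (needsResize(640, 480)) {
    demoCanvas = document.createElement('canvas');
    demoCanvas.width = 640; // assignments keep plain access: ?. is read-only
    demoCanvas.height = 480;
  }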

View File

@@ -17,10 +17,7 @@ function GLProgram(gl, vertexSource, fragmentSource) {
     const shader = gl.createShader(type);
     gl.shaderSource(shader, source);
     gl.compileShader(shader);
-    if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
-      // @ts-ignore
-      throw new Error('Filter: GL compile failed', gl.getShaderInfoLog(shader));
-    }
+    if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) throw new Error('Filter: GL compile failed', gl.getShaderInfoLog(shader));
     return shader;
   };
@@ -33,10 +30,7 @@ function GLProgram(gl, vertexSource, fragmentSource) {
   gl.attachShader(this.id, _fsh);
   gl.linkProgram(this.id);
-  if (!gl.getProgramParameter(this.id, gl.LINK_STATUS)) {
-    // @ts-ignore
-    throw new Error('Filter: GL link failed', gl.getProgramInfoLog(this.id));
-  }
+  if (!gl.getProgramParameter(this.id, gl.LINK_STATUS)) throw new Error('Filter: GL link failed', gl.getProgramInfoLog(this.id));
   gl.useProgram(this.id);
   // Collect attributes
@@ -123,7 +117,6 @@ export function GLImageFilter(params) {
   };
   const _getTempFramebuffer = function (index) {
-    // @ts-ignore
    _tempFramebuffers[index] = _tempFramebuffers[index] || _createFramebufferTexture(_width, _height);
    return _tempFramebuffers[index];
  };
@@ -138,7 +131,6 @@
       source = _sourceTexture;
     } else {
       // All following draw calls use the temp buffer last drawn to
-      // @ts-ignore
       source = _getTempFramebuffer(_currentFramebufferIndex)?.texture;
     }
     _drawCount++;
@@ -151,7 +143,6 @@
     } else {
       // Intermediate draw call - get a temp buffer to draw to
       _currentFramebufferIndex = (_currentFramebufferIndex + 1) % 2;
-      // @ts-ignore
       target = _getTempFramebuffer(_currentFramebufferIndex)?.fbo;
     }
     // Bind the source and target and draw the two triangles
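
Note: one caveat on the collapsed throw statements above: the standard Error constructor takes a single message string, so the shader/program log passed as a second argument is discarded at runtime (the ES2022 cause option requires an options object instead). A sketch that keeps the log in the message, offered as a hypothetical alternative rather than the commit's code:

  if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    throw new Error(`Filter: GL compile failed: ${gl.getShaderInfoLog(shader)}`);
  }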

View File

@@ -12,8 +12,9 @@ const scaleBox = 2.5; // increase box size
 export async function load(config) {
   if (!model) {
     model = await tf.loadGraphModel(join(config.modelBasePath, config.object.modelPath));
-    // @ts-ignore
-    model.inputSize = parseInt(Object.values(model.modelSignature['inputs'])[0].tensorShape.dim[2].size);
+    const inputs = Object.values(model.modelSignature['inputs']);
+    model.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null;
+    if (!model.inputSize) throw new Error(`Human: Cannot determine model inputSize: ${config.object.modelPath}`);
     if (!model || !model.modelUrl) log('load model failed:', config.object.modelPath);
     else if (config.debug) log('load model:', model.modelUrl);
   }
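
Note: with the @ts-ignore gone, a model signature that lacks a readable input shape now surfaces as a thrown error instead of a silent NaN inputSize. A hedged caller-side sketch (names taken from the diff above, assumed to run inside an async context):

  try {
    await load(config);
  } catch (err) {
    console.error('object model failed to load:', err); // e.g. a wrong config.object.modelPath
  }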

View File

@@ -4,8 +4,8 @@ import * as tf from '../../dist/tfjs.esm.js';
 export const config = {
   name: 'humangl',
   priority: 99,
-  canvas: null,
-  gl: null,
+  canvas: <null | OffscreenCanvas | HTMLCanvasElement>null,
+  gl: <any>null,
   width: 1024,
   height: 1024,
   webGLattr: { // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.2
@@ -24,14 +24,12 @@ export function register(): void {
   if (!tf.findBackend(config.name)) {
     log('backend registration:', config.name);
     try {
-      // @ts-ignore
       config.canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(config.width, config.height) : document.createElement('canvas');
     } catch (err) {
       log('error: cannot create canvas:', err);
       return;
     }
     try {
-      // @ts-ignore
       config.gl = config.canvas.getContext('webgl2', config.webGLattr);
     } catch (err) {
       log('error: cannot get WebGL2 context:', err);
@@ -62,7 +60,6 @@
     }
     try {
       tf.ENV.set('WEBGL_VERSION', 2);
-      // @ts-ignore
       // tf.ENV.set('WEBGL_MAX_TEXTURE_SIZE', config.gl.getParameter(config.gl.MAX_TEXTURE_SIZE));
       // tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
       // tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
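
Note: the inline cast syntax gives the config literal nullable-but-typed slots, which is why the later assignments in register() now type-check without @ts-ignore. A minimal standalone sketch of the pattern (the object name here is hypothetical):

  // typed-null initializer: the property is null now, but typed for later assignment
  const demoConfig = {
    canvas: <null | OffscreenCanvas | HTMLCanvasElement>null,
  };
  // assignment type-checks against the declared union, no suppression needed
  demoConfig.canvas = (typeof OffscreenCanvas !== 'undefined')
    ? new OffscreenCanvas(1024, 1024)
    : document.createElement('canvas');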

wiki

@@ -1 +1 @@
-Subproject commit d09d36cf9de309728504402bf301ac3ab018de65
+Subproject commit 652dee1013ae99789199cc229d6652b3323ae7de