redefine draw helpers interface

pull/356/head
Vladimir Mandic 2021-09-15 18:58:54 -04:00
parent 5f68153af7
commit 42e6a25294
7 changed files with 46 additions and 49 deletions

View File

@@ -11,6 +11,7 @@
### **HEAD -> main** 2021/09/15 mandic00@live.com
- added visual results browser to demo
- reorganize tfjs bundle
- experimental custom tfjs bundle - disabled
- add platform and backend capabilities detection

View File

@@ -4,10 +4,8 @@
WebGL shader optimizations for faster load and initial detection
- Implement WebGL uniforms for shaders: <https://github.com/tensorflow/tfjs/issues/5205>
- Fix shader packing: <https://github.com/tensorflow/tfjs/issues/5343>
- Refactor ImageFX as ESM
-- Benchmark WGSL for WebGPU
+- Add and benchmark WGSL for WebGPU
<br>

View File

@@ -2,9 +2,9 @@
* Module that implements helper draw functions, exposed as human.draw
*/
-import { TRI468 as triangulation } from '../blazeface/coords';
-import { mergeDeep, now } from '../helpers';
-import type { Result, FaceResult, BodyResult, HandResult, ObjectResult, GestureResult, PersonResult } from '../result';
+import { TRI468 as triangulation } from './blazeface/coords';
+import { mergeDeep, now } from './helpers';
+import type { Result, FaceResult, BodyResult, HandResult, ObjectResult, GestureResult, PersonResult } from './result';
/**
* Draw Options
@@ -139,7 +139,7 @@ function curves(ctx, points: [number, number, number?][] = [], localOptions) {
}
}
-export async function gesture(inCanvas: HTMLCanvasElement, result: Array<GestureResult>, drawOptions?: DrawOptions) {
+export async function gesture(inCanvas: HTMLCanvasElement | OffscreenCanvas, result: Array<GestureResult>, drawOptions?: Partial<DrawOptions>) {
const localOptions = mergeDeep(options, drawOptions);
if (!result || !inCanvas) return;
const ctx = getCanvasContext(inCanvas);
@@ -164,7 +164,7 @@ export async function gesture(inCanvas: HTMLCanvasElement, result: Array<Gesture
}
}
-export async function face(inCanvas: HTMLCanvasElement, result: Array<FaceResult>, drawOptions?: DrawOptions) {
+export async function face(inCanvas: HTMLCanvasElement | OffscreenCanvas, result: Array<FaceResult>, drawOptions?: Partial<DrawOptions>) {
const localOptions = mergeDeep(options, drawOptions);
if (!result || !inCanvas) return;
const ctx = getCanvasContext(inCanvas);
@@ -266,7 +266,7 @@ export async function face(inCanvas: HTMLCanvasElement, result: Array<FaceResult
}
}
-export async function body(inCanvas: HTMLCanvasElement, result: Array<BodyResult>, drawOptions?: DrawOptions) {
+export async function body(inCanvas: HTMLCanvasElement | OffscreenCanvas, result: Array<BodyResult>, drawOptions?: Partial<DrawOptions>) {
const localOptions = mergeDeep(options, drawOptions);
if (!result || !inCanvas) return;
const ctx = getCanvasContext(inCanvas);
@@ -376,7 +376,7 @@ export async function body(inCanvas: HTMLCanvasElement, result: Array<BodyResult
}
}
-export async function hand(inCanvas: HTMLCanvasElement, result: Array<HandResult>, drawOptions?: DrawOptions) {
+export async function hand(inCanvas: HTMLCanvasElement | OffscreenCanvas, result: Array<HandResult>, drawOptions?: Partial<DrawOptions>) {
const localOptions = mergeDeep(options, drawOptions);
if (!result || !inCanvas) return;
const ctx = getCanvasContext(inCanvas);
@@ -441,7 +441,7 @@ export async function hand(inCanvas: HTMLCanvasElement, result: Array<HandResult
}
}
-export async function object(inCanvas: HTMLCanvasElement, result: Array<ObjectResult>, drawOptions?: DrawOptions) {
+export async function object(inCanvas: HTMLCanvasElement | OffscreenCanvas, result: Array<ObjectResult>, drawOptions?: Partial<DrawOptions>) {
const localOptions = mergeDeep(options, drawOptions);
if (!result || !inCanvas) return;
const ctx = getCanvasContext(inCanvas);
@@ -466,7 +466,7 @@ export async function object(inCanvas: HTMLCanvasElement, result: Array<ObjectRe
}
}
-export async function person(inCanvas: HTMLCanvasElement, result: Array<PersonResult>, drawOptions?: DrawOptions) {
+export async function person(inCanvas: HTMLCanvasElement | OffscreenCanvas, result: Array<PersonResult>, drawOptions?: Partial<DrawOptions>) {
const localOptions = mergeDeep(options, drawOptions);
if (!result || !inCanvas) return;
const ctx = getCanvasContext(inCanvas);
@@ -492,38 +492,24 @@ export async function person(inCanvas: HTMLCanvasElement, result: Array<PersonRe
}
}
-export async function canvas(input: HTMLCanvasElement | HTMLImageElement | HTMLMediaElement | HTMLVideoElement, output: HTMLCanvasElement) {
+export async function canvas(input: HTMLCanvasElement | OffscreenCanvas | HTMLImageElement | HTMLMediaElement | HTMLVideoElement, output: HTMLCanvasElement) {
if (!input || !output) return;
const ctx = getCanvasContext(output);
ctx.drawImage(input, 0, 0);
}
-export async function all(inCanvas: HTMLCanvasElement, result: Result, drawOptions?: DrawOptions) {
+export async function all(inCanvas: HTMLCanvasElement | OffscreenCanvas, result: Result, drawOptions?: Partial<DrawOptions>) {
if (!result || !result.performance || !inCanvas) return null;
const timestamp = now();
const localOptions = mergeDeep(options, drawOptions);
const promise = Promise.all([
face(inCanvas, result.face, localOptions),
body(inCanvas, result.body, localOptions),
hand(inCanvas, result.hand, localOptions),
object(inCanvas, result.object, localOptions),
-// person(inCanvas, result.persons, localOptions);
gesture(inCanvas, result.gesture, localOptions), // gestures do not have buffering
+// person(inCanvas, result.persons, localOptions); // already included above
]);
-/*
-if (!bufferedResult) bufferedResult = result; // first pass
-else if (localOptions.bufferedOutput) calcBuffered(result); // do results interpolation
-else bufferedResult = result; // or just use results as-is
-const promises: Promise<void>[] = [];
-promises.push(face(inCanvas, bufferedResult.face, localOptions));
-promises.push(body(inCanvas, bufferedResult.body, localOptions));
-promises.push(hand(inCanvas, bufferedResult.hand, localOptions));
-promises.push(object(inCanvas, bufferedResult.object, localOptions));
-// promises.push(person(inCanvas, bufferedResult.persons, localOptions));
-promises.push(gesture(inCanvas, result.gesture, localOptions)); // gestures do not have buffering
-// await Promise.all(promises);
-*/
result.performance.draw = Math.trunc(now() - timestamp);
return promise;
}
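Taken together, these hunks relax every draw helper to accept `HTMLCanvasElement | OffscreenCanvas` as the target and `Partial<DrawOptions>` for options, so a caller can draw to a worker-side canvas and override a single option without building a full `DrawOptions` object. A minimal caller-side sketch of the new signatures (the specific option names used here are assumptions, not confirmed by this diff):

```ts
import { Human } from '@vladmandic/human';

const human = new Human();
// OffscreenCanvas is now a valid draw target, e.g. from inside a web worker
const canvas = new OffscreenCanvas(1280, 720);

async function render(video: HTMLVideoElement) {
  const result = await human.detect(video);
  // Partial<DrawOptions>: supply only the overrides, everything else
  // falls back to the defaults in human.draw.options
  await human.draw.face(canvas, result.face, { drawLabels: false }); // drawLabels is an assumed option name
  await human.draw.all(canvas, result, { lineWidth: 2 }); // lineWidth is an assumed option name
}
```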

View File

@@ -22,8 +22,7 @@ export class HandPipeline {
constructor(handDetector, handPoseModel) {
this.handDetector = handDetector;
this.handPoseModel = handPoseModel;
-// @ts-ignore model is not undefined here
-this.inputSize = this.handPoseModel?.inputs[0].shape[2];
+this.inputSize = this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0;
this.storedBoxes = [];
this.skipped = 0;
this.detectedHands = 0;
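The constructor change swaps a `@ts-ignore`d optional chain for an explicit check on the model's input shape, giving a defined fallback instead of a suppressed type error. The guard pattern in isolation, with simplified stand-ins for the TFJS `GraphModel` metadata (these interface names are illustrative):

```ts
// Simplified stand-ins for TFJS GraphModel input metadata (illustrative)
interface ModelInput { shape?: number[] }
interface GraphModelLike { inputs: ModelInput[] }

// Explicit guard instead of @ts-ignore: shape may be undefined,
// so fall back to 0 rather than suppressing the compiler error
function inputSizeOf(model: GraphModelLike): number {
  return model.inputs[0].shape ? model.inputs[0].shape[2] : 0;
}
```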
@@ -100,7 +99,7 @@ export class HandPipeline {
// for (const possible of boxes) this.storedBoxes.push(possible);
if (this.storedBoxes.length > 0) useFreshBox = true;
}
-const hands: Array<{ landmarks?: number[], confidence: number, box: { topLeft: number[], bottomRight: number[] } }> = [];
+const hands: Array<{ landmarks: number[], confidence: number, box: { topLeft: number[], bottomRight: number[] } }> = [];
// go through working set of boxes
for (let i = 0; i < this.storedBoxes.length; i++) {
@@ -146,6 +145,7 @@
const result = {
confidence: currentBox.confidence,
box: { topLeft: enlarged.startPoint, bottomRight: enlarged.endPoint },
+landmarks: [],
};
hands.push(result);
}
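Since `landmarks` is no longer optional in the `hands` element type, every branch that pushes a result must provide it, which is why the detection-only branch now adds `landmarks: []`. The tightened shape reduced to a self-contained sketch (values are illustrative):

```ts
// landmarks is now required, so consumers no longer need to null-check it
interface HandDetectResult {
  landmarks: number[];
  confidence: number;
  box: { topLeft: number[], bottomRight: number[] };
}

// Detection-only branch: the landmark model did not run,
// so an empty array satisfies the required field
const detectionOnly: HandDetectResult = {
  confidence: 0.9,
  box: { topLeft: [0, 0], bottomRight: [100, 100] },
  landmarks: [],
};
```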

View File

@@ -32,7 +32,6 @@ export async function predict(input: Tensor, config: Config): Promise<HandResult
const annotations = {};
if (predictions[i].landmarks) {
for (const key of Object.keys(meshAnnotations)) {
-// @ts-ignore landmarks are not undefined
annotations[key] = meshAnnotations[key].map((index) => predictions[i].landmarks[index]);
}
}
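The `@ts-ignore` can be dropped here because the pipeline change above makes `landmarks` a required field (defaulting to `[]`), so the indexed access is always defined. The annotation-mapping pattern on its own (the index values below are illustrative, not the real mesh tables):

```ts
// Named groups of landmark indices (illustrative values)
const meshAnnotations: Record<string, number[]> = {
  thumb: [1, 2, 3, 4],
  indexFinger: [5, 6, 7, 8],
};

interface Prediction { landmarks: number[][] } // required field, no @ts-ignore needed

function annotate(prediction: Prediction): Record<string, number[][]> {
  const annotations: Record<string, number[][]> = {};
  if (prediction.landmarks.length > 0) { // empty when the landmark model did not run
    for (const key of Object.keys(meshAnnotations)) {
      annotations[key] = meshAnnotations[key].map((index) => prediction.landmarks[index]);
    }
  }
  return annotations;
}
```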

View File

@@ -4,7 +4,7 @@
import { log, now, mergeDeep } from './helpers';
import { Config, defaults } from './config';
-import type { Result, FaceResult, HandResult, BodyResult, ObjectResult, GestureResult } from './result';
+import type { Result, FaceResult, HandResult, BodyResult, ObjectResult, GestureResult, PersonResult } from './result';
import * as tf from '../dist/tfjs.esm.js';
import * as models from './models';
import * as face from './face';
@@ -20,7 +20,7 @@ import * as centernet from './object/centernet';
import * as segmentation from './segmentation/segmentation';
import * as gesture from './gesture/gesture';
import * as image from './image/image';
-import * as draw from './draw/draw';
+import * as draw from './draw';
import * as persons from './persons';
import * as interpolate from './interpolate';
import * as env from './env';
@@ -28,17 +28,18 @@ import * as backend from './tfjs/backend';
import * as app from '../package.json';
import * as warmups from './warmup';
import type { Tensor, GraphModel } from './tfjs/types';
+import type { DrawOptions } from './draw';
// export types
export * from './config';
export * from './result';
-export type { DrawOptions } from './draw/draw';
+export type { DrawOptions } from './draw';
export { env } from './env';
/** Defines all possible input types for **Human** detection
* @typedef Input Type
*/
-export type Input = Tensor | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas | typeof Image | typeof env.env.Canvas;
+export type Input = Tensor | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
/** Events dispatched by `human.events`
* - `create`: triggered when Human object is instantiated
@@ -73,29 +74,36 @@ export type TensorFlow = typeof tf;
export class Human {
/** Current version of Human library in *semver* format */
version: string;
/** Current configuration
* - Details: {@link Config}
*/
config: Config;
/** Last known result of detect run
* - Can be accessed anytime after initial detection
*/
result: Result;
/** Current state of Human library
* - Can be polled to determine which operations are currently being executed
* - Progresses through: 'config', 'check', 'backend', 'load', 'run:<model>', 'idle'
*/
state: string;
/** currently processed image tensor and canvas */
process: { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement | null };
/** @internal: Instance of TensorFlow/JS used by Human
* - Can be embedded or externally provided
*/
tf: TensorFlow;
/**
* Object containing environment information used for diagnostics
*/
env: env.Env;
/** Draw helper classes that can draw detected objects on canvas using specified draw options
* - options: {@link DrawOptions} global settings for all draw operations, can be overridden for each draw method
* - face: draw detected faces
@@ -104,15 +112,9 @@ export class Human {
* - canvas: draw processed canvas which is a processed copy of the input
* - all: meta-function that performs: canvas, face, body, hand
*/
-draw: {
-options: draw.DrawOptions,
-gesture: typeof draw.gesture,
-face: typeof draw.face,
-body: typeof draw.body,
-hand: typeof draw.hand,
-canvas: typeof draw.canvas,
-all: typeof draw.all,
-};
+// draw: typeof draw;
+draw: { canvas, face, body, hand, gesture, object, person, all, options: DrawOptions };
/** @internal: Currently loaded models */
models: {
face: [unknown, GraphModel | null, GraphModel | null] | null,
@@ -130,6 +132,7 @@
faceres: GraphModel | null,
segmentation: GraphModel | null,
};
/** Container for events dispatched by Human
*
* Possible events:
@@ -166,7 +169,6 @@
Object.defineProperty(this, 'version', { value: app.version }); // expose version property directly on class itself
this.config = mergeDeep(defaults, userConfig || {});
this.tf = tf;
-this.draw = draw;
this.state = 'idle';
this.#numTensors = 0;
this.#analyzeMemoryLeaks = false;
@@ -177,11 +179,11 @@
// object that contains all initialized models
this.models = {
face: null, // array of models
-handpose: null, // array of models
posenet: null,
blazepose: null,
efficientpose: null,
movenet: null,
+handpose: null, // array of models
age: null,
gender: null,
emotion: null,
@@ -191,6 +193,18 @@
faceres: null,
segmentation: null,
};
+// reexport draw methods
+this.draw = {
+options: draw.options as DrawOptions,
+canvas: (input: HTMLCanvasElement | OffscreenCanvas | HTMLImageElement | HTMLMediaElement | HTMLVideoElement, output: HTMLCanvasElement) => draw.canvas(input, output),
+face: (output: HTMLCanvasElement | OffscreenCanvas, result: FaceResult[], options?: Partial<DrawOptions>) => draw.face(output, result, options),
+body: (output: HTMLCanvasElement | OffscreenCanvas, result: BodyResult[], options?: Partial<DrawOptions>) => draw.body(output, result, options),
+hand: (output: HTMLCanvasElement | OffscreenCanvas, result: HandResult[], options?: Partial<DrawOptions>) => draw.hand(output, result, options),
+gesture: (output: HTMLCanvasElement | OffscreenCanvas, result: GestureResult[], options?: Partial<DrawOptions>) => draw.gesture(output, result, options),
+object: (output: HTMLCanvasElement | OffscreenCanvas, result: ObjectResult[], options?: Partial<DrawOptions>) => draw.object(output, result, options),
+person: (output: HTMLCanvasElement | OffscreenCanvas, result: PersonResult[], options?: Partial<DrawOptions>) => draw.person(output, result, options),
+all: (output: HTMLCanvasElement | OffscreenCanvas, result: Result, options?: Partial<DrawOptions>) => draw.all(output, result, options),
+};
this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [] };
// export access to image processing
// @ts-ignore eslint-typescript cannot correctly infer type in anonymous function
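Replacing the old `this.draw = draw` assignment with explicit arrow-function wrappers declares the public signatures at the class boundary, including the `OffscreenCanvas` targets and `Partial<DrawOptions>` overrides, rather than inheriting whatever the module exports infer to. The pattern reduced to its essentials (all names here are illustrative):

```ts
// Stand-in for a loosely-typed helper module such as ./draw (illustrative)
const helpers = {
  options: { lineWidth: 1 },
  box(target: HTMLCanvasElement | OffscreenCanvas, opts?: { lineWidth?: number }): void {
    // drawing logic elided
  },
};

class Wrapper {
  // public contract declared explicitly, decoupled from the module's inferred types
  draw: {
    options: { lineWidth: number },
    box: (target: HTMLCanvasElement | OffscreenCanvas, opts?: { lineWidth?: number }) => void,
  };

  constructor() {
    this.draw = {
      options: helpers.options,
      box: (target, opts) => helpers.box(target, opts),
    };
  }
}
```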

View File

@@ -40,4 +40,3 @@ async function main() {
}
main();
-// @ts-ignore // in nodejs+wasm must set explicitly before using human