diff --git a/package.json b/package.json
index 56be961e..655c5753 100644
--- a/package.json
+++ b/package.json
@@ -24,7 +24,7 @@
     "start": "node --no-warnings demo/nodejs/node.js",
     "dev": "build --profile development",
     "build": "rimraf test/build.log && build --profile production",
-    "test": "node --no-warnings --unhandled-rejections=strict --trace-uncaught test/test.js",
+    "test": "node --no-warnings --unhandled-rejections=strict --trace-uncaught test/node.js",
     "lint": "eslint src demo test",
     "scan": "npx auditjs@latest ossi --dev --quiet"
   },
diff --git a/src/config.ts b/src/config.ts
index 363782a4..2db40193 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -216,10 +216,10 @@ export interface Config {
   */
   cacheSensitivity: number;

-  /** Cache sensitivity
-   * - values 0..1 where 0.01 means reset cache if input changed more than 1%
-   * - set to 0 to disable caching
-   */
+  /** Yield to main thread periodically */
+  yield: boolean;
+
+  /** Internal Variable */
   skipFrame: boolean;

   /** Run input through image filters before inference
@@ -262,6 +262,7 @@ const config: Config = {
   cacheSensitivity: 0.75,      // cache sensitivity
                                // values 0..1 where 0.01 means reset cache if input changed more than 1%
                                // set to 0 to disable caching
+  yield: false,                // yield to main thread periodically
   skipFrame: false,            // internal & dynamic
   filter: {                    // run input through image filters before inference
                                // image filters run with near-zero latency as they are executed on the GPU
diff --git a/src/env.ts b/src/env.ts
index f0f8ecb4..effb677f 100644
--- a/src/env.ts
+++ b/src/env.ts
@@ -1,7 +1,8 @@
 import * as tf from '../dist/tfjs.esm.js';
 import * as image from './image/image';
+import { mergeDeep } from './helpers';

-export interface Env {
+export type Env = {
   browser: undefined | boolean,
   node: undefined | boolean,
   worker: undefined | boolean,
@@ -12,6 +13,7 @@
   tfjs: {
     version: undefined | string,
   },
+  offscreen: undefined | boolean,
   wasm: {
     supported: undefined | boolean,
     backend: undefined | boolean,
@@ -34,7 +36,8 @@
   Image: undefined,
 }

-export const env: Env = {
+// eslint-disable-next-line import/no-mutable-exports
+export let env: Env = {
   browser: undefined,
   node: undefined,
   worker: undefined,
@@ -42,6 +45,7 @@
   agent: undefined,
   initial: true,
   backends: [],
+  offscreen: undefined,
   tfjs: {
     version: undefined,
   },
@@ -127,6 +131,8 @@ export async function get() {
   env.worker = env.browser ? (typeof WorkerGlobalScope !== 'undefined') : undefined;
   env.tfjs.version = tf.version_core;
+  // offscreencanvas supported?
+  env.offscreen = typeof env.offscreen === 'undefined' ? typeof OffscreenCanvas !== 'undefined' : env.offscreen;

   // get platform and agent
   if (typeof navigator !== 'undefined') {
     const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
@@ -141,9 +147,12 @@ export async function get() {
     env.platform = `${process.platform} ${process.arch}`;
     env.agent = `NodeJS ${process.version}`;
   }
-  await backendInfo();

   // get cpu info
   // await cpuInfo();
 }
+
+export async function set(obj) {
+  env = mergeDeep(env, obj);
+}
diff --git a/src/helpers.ts b/src/helpers.ts
index 17cf797c..dc73184c 100644
--- a/src/helpers.ts
+++ b/src/helpers.ts
@@ -63,3 +63,9 @@ export const minmax = (data: Array<number>) => data.reduce((acc: Array<number>, val) => {
   acc[1] = (acc[1] === undefined || val > acc[1]) ? val : acc[1];
   return acc;
 }, []);
+
+// helper function: async wait
+export async function wait(time) {
+  const waiting = new Promise((resolve) => setTimeout(() => resolve(true), time));
+  await waiting;
+}
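The three building blocks above compose: `yield` is the new user-facing switch, `env` becomes a mutable live binding that `set()` replaces via `mergeDeep`, and `wait()` is the primitive that actually yields. A minimal sketch of how they fit together (the relative import paths are illustrative, not part of this diff):

```ts
import { env, set } from './env';
import { wait } from './helpers';

// override a detected capability, e.g. force the DOM-canvas path even where OffscreenCanvas exists
await set({ offscreen: false });
console.log(env.offscreen); // false — `env` is a live binding, so the reassignment inside set() is visible to importers

// wait() parks the caller for ~1ms, letting the event loop service pending UI work
await wait(1);
```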
diff --git a/src/human.ts b/src/human.ts
index e73213f9..467b1aa7 100644
--- a/src/human.ts
+++ b/src/human.ts
@@ -2,7 +2,7 @@
  * Human main module
  */

-import { log, now, mergeDeep, validate } from './helpers';
+import { log, now, mergeDeep, validate, wait } from './helpers';
 import { Config, defaults } from './config';
 import type { Result, FaceResult, HandResult, BodyResult, ObjectResult, GestureResult, PersonResult } from './result';
 import * as tf from '../dist/tfjs.esm.js';
@@ -35,7 +35,7 @@ import type { DrawOptions } from './draw';
 export * from './config';
 export * from './result';
 export type { DrawOptions } from './draw';
-export { env } from './env';
+export { env, Env } from './env';

 /** Defines all possible input types for **Human** detection
  * @typedef Input Type
@@ -320,6 +320,7 @@
    */
   init() {
     backend.check(this);
+    env.set(this.env);
   }

   /** Load method preloads all configured models on-demand
@@ -395,6 +396,7 @@
    */
   async detect(input: Input, userConfig?: Partial<Config>): Promise<Result> {
     // detection happens inside a promise
+    if (this.config.yield) await wait(1);
     return new Promise(async (resolve) => {
       this.state = 'config';
       let timeStamp;
@@ -419,6 +421,7 @@
       // load models if enabled
       await this.load();
+      if (this.config.yield) await wait(1);

       timeStamp = now();
       let img = image.process(input, this.config);
       this.process = img;
@@ -465,11 +468,11 @@
       let objectRes: ObjectResult[] | Promise<ObjectResult[]> | never[] = [];

       // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
+      this.state = 'run:face';
       if (this.config.async) {
         faceRes = this.config.face.enabled ? face.detectFace(this, img.tensor) : [];
         if (this.performance.face) delete this.performance.face;
       } else {
-        this.state = 'run:face';
         timeStamp = now();
         faceRes = this.config.face.enabled ? await face.detectFace(this, img.tensor) : [];
         elapsedTime = Math.trunc(now() - timeStamp);
@@ -478,6 +481,7 @@

       // run body: can be posenet, blazepose, efficientpose, movenet
       this.analyze('Start Body:');
+      this.state = 'run:body';
       if (this.config.async) {
         if (this.config.body.modelPath?.includes('posenet')) bodyRes = this.config.body.enabled ? posenet.predict(img.tensor, this.config) : [];
         else if (this.config.body.modelPath?.includes('blazepose')) bodyRes = this.config.body.enabled ? blazepose.predict(img.tensor, this.config) : [];
@@ -485,7 +489,6 @@
         else if (this.config.body.modelPath?.includes('movenet')) bodyRes = this.config.body.enabled ? movenet.predict(img.tensor, this.config) : [];
         if (this.performance.body) delete this.performance.body;
       } else {
-        this.state = 'run:body';
         timeStamp = now();
         if (this.config.body.modelPath?.includes('posenet')) bodyRes = this.config.body.enabled ? await posenet.predict(img.tensor, this.config) : [];
         else if (this.config.body.modelPath?.includes('blazepose')) bodyRes = this.config.body.enabled ? await blazepose.predict(img.tensor, this.config) : [];
@@ -498,11 +501,11 @@

       // run handpose
       this.analyze('Start Hand:');
+      this.state = 'run:hand';
       if (this.config.async) {
         handRes = this.config.hand.enabled ? handpose.predict(img.tensor, this.config) : [];
         if (this.performance.hand) delete this.performance.hand;
       } else {
-        this.state = 'run:hand';
         timeStamp = now();
         handRes = this.config.hand.enabled ? await handpose.predict(img.tensor, this.config) : [];
         elapsedTime = Math.trunc(now() - timeStamp);
@@ -512,12 +515,12 @@

       // run nanodet
       this.analyze('Start Object:');
+      this.state = 'run:object';
       if (this.config.async) {
         if (this.config.object.modelPath?.includes('nanodet')) objectRes = this.config.object.enabled ? nanodet.predict(img.tensor, this.config) : [];
         else if (this.config.object.modelPath?.includes('centernet')) objectRes = this.config.object.enabled ? centernet.predict(img.tensor, this.config) : [];
         if (this.performance.object) delete this.performance.object;
       } else {
-        this.state = 'run:object';
         timeStamp = now();
         if (this.config.object.modelPath?.includes('nanodet')) objectRes = this.config.object.enabled ? await nanodet.predict(img.tensor, this.config) : [];
         else if (this.config.object.modelPath?.includes('centernet')) objectRes = this.config.object.enabled ? await centernet.predict(img.tensor, this.config) : [];
@@ -527,9 +530,12 @@
       this.analyze('End Object:');

       // if async wait for results
+      this.state = 'run:await';
+      if (this.config.yield) await wait(1);
       if (this.config.async) [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);

       // run gesture analysis last
+      this.state = 'run:gesture';
       let gestureRes: GestureResult[] = [];
       if (this.config.gesture.enabled) {
         timeStamp = now();
@@ -539,7 +545,6 @@
       }

       this.performance.total = Math.trunc(now() - timeStart);
-      this.state = 'idle';
       const shape = this.process?.tensor?.shape || [];
       this.result = {
         face: faceRes as FaceResult[],
@@ -558,6 +563,7 @@

       // log('Result:', result);
       this.emit('detect');
+      this.state = 'idle';
       resolve(this.result);
     });
   }
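Because every stage now records its state up front (not only in the synchronous branches) and `yield: true` inserts a `wait(1)` between stages, an in-flight `detect()` call can be observed from outside. A sketch, assuming the `Human` constructor accepts a partial config the same way `detect()` does, and that `image` is some valid input element; the import path is illustrative:

```ts
import { Human } from './src/human';

const human = new Human({ yield: true });
const poll = setInterval(() => console.log(human.state), 50);
// expected progression: config → run:face → run:body → run:hand → run:object → run:await → run:gesture → idle
const result = await human.detect(image);
clearInterval(poll);
console.log('total:', human.performance.total, 'ms');
```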
diff --git a/src/image/image.ts b/src/image/image.ts
index 08e35867..9c0b9fa0 100644
--- a/src/image/image.ts
+++ b/src/image/image.ts
@@ -21,7 +21,7 @@ let fx: fxImage.GLImageFilter | null; // instance of imagefx
 export function canvas(width, height): HTMLCanvasElement | OffscreenCanvas {
   let c;
   if (env.browser) {
-    if (typeof OffscreenCanvas !== 'undefined') {
+    if (env.offscreen) {
       c = new OffscreenCanvas(width, height);
     } else {
       c = document.createElement('canvas');
@@ -63,6 +63,7 @@ export function process(input: Input, config: Config): { tensor: Tensor | null,
   }
   if (input instanceof tf.Tensor) {
     // if input is tensor, use as-is
+    if (input.isDisposed) throw new Error('input tensor is disposed');
     if ((input as unknown as Tensor).shape && (input as unknown as Tensor).shape.length === 4 && (input as unknown as Tensor).shape[0] === 1 && (input as unknown as Tensor).shape[3] === 3) tensor = tf.clone(input);
     else throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${(input as unknown as Tensor).shape}`);
   } else {
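The new guard changes the failure mode for tensor inputs: a stale tensor is now rejected before inference instead of failing deep inside a model. A sketch of the input contract, with `tf` and `human` assumed to be set up as in the snippets above:

```ts
const input = tf.zeros([1, 480, 640, 3]); // must be shaped [1, height, width, 3]
const result = await human.detect(input); // process() clones the tensor, so the caller keeps ownership
input.dispose();                          // safe once detect() resolves; passing an already-disposed
                                          // tensor now throws 'input tensor is disposed' up front
```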
diff --git a/src/tfjs/humangl.ts b/src/tfjs/humangl.ts
index b4c214d5..f4d5fb53 100644
--- a/src/tfjs/humangl.ts
+++ b/src/tfjs/humangl.ts
@@ -45,6 +45,7 @@ function extensions(): void {
  */
 export async function register(instance): Promise<void> {
   // force backend reload if gl context is not valid
+  if (instance.config.backend !== 'humangl') return;
   if ((config.name in tf.engine().registry) && (!config.gl || !config.gl.getParameter(config.gl.VERSION))) {
     log('error: humangl backend invalid context');
     models.reset(instance);
@@ -95,11 +96,12 @@ export async function register(instance): Promise<void> {
       log('error: cannot set WebGL context:', err);
       return;
     }
-    const current = tf.backend().getGPGPUContext().gl;
+    const current = tf.backend().getGPGPUContext ? tf.backend().getGPGPUContext().gl : null;
     if (current) {
       log(`humangl webgl version:${current.getParameter(current.VERSION)} renderer:${current.getParameter(current.RENDERER)}`);
     } else {
-      log('error: no current context:', current, config.gl);
+      log('error: no current gl context:', current, config.gl);
+      return;
     }
     try {
       const ctx = new tf.GPGPUContext(config.gl);
diff --git a/test/browser.html b/test/browser.html
new file mode 100644
index 00000000..415dd2aa
--- /dev/null
+++ b/test/browser.html
@@ -0,0 +1,146 @@
+<!-- new 146-line browser test page titled "Human Browser Tests"; markup not preserved in this extract -->
diff --git a/test/test.js b/test/node.js
similarity index 100%
rename from test/test.js
rename to test/node.js
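Finally, humangl registration is now explicitly opt-in: `register()` returns immediately unless the instance is configured for the custom backend. A sketch of the only configuration that still exercises that code path; the assumption that `init()`'s `backend.check()` eventually reaches `register()` follows the existing wiring, which this diff does not show:

```ts
const human = new Human({ backend: 'humangl' });
human.init(); // runs backend.check() and env.set(this.env); register() is a no-op for any other backend
```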