refactor human.env to a class type

pull/293/head
Vladimir Mandic 2021-10-21 10:26:44 -04:00
parent 7f87d2633a
commit 01316a4c2d
7 changed files with 173 additions and 192 deletions
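In short, this commit replaces the mutable env object and its get()/set() helpers in './util/env' with an Env class exported as a singleton: runtime detection (browser vs node, platform, agent, offscreen support) moves into the constructor, and backend probing (wasm/webgl/webgpu, kernel list) moves into an updateBackend() method. A minimal sketch of the old vs new access pattern follows; the consumer code is illustrative, not part of the diff:

// before: mutable object populated by helper functions
import * as env from './util/env';
await env.get();                        // detect runtime and backends
if (env.env.browser) { /* ... */ }      // detected values live on env.env

// after: class instance exported as a singleton
import { env } from './util/env';
if (env.browser) { /* ... */ }          // detection already ran in the Env constructor
await env.updateBackend();              // re-probe wasm/webgl/webgpu after backend init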

View File

@@ -89,7 +89,7 @@ const ui = {
   autoPlay: false, // start webcam & detection on load
   // internal variables
-  exceptionHandler: true, // should capture all unhandled exceptions
+  exceptionHandler: false, // should capture all unhandled exceptions
   busy: false, // internal camera busy flag
   menuWidth: 0, // internal
   menuHeight: 0, // internal
@@ -168,13 +168,15 @@ function log(...msg) {
   if (ui.console) console.log(ts, ...msg);
 }
+let prevStatus = '';
 function status(msg) {
   const div = document.getElementById('status');
-  if (div && msg && msg.length > 0) {
+  if (div && msg && msg !== prevStatus && msg.length > 0) {
     log('status', msg);
     document.getElementById('play').style.display = 'none';
     document.getElementById('loader').style.display = 'block';
     div.innerText = msg;
+    prevStatus = msg;
   } else {
     const video = document.getElementById('video');
     const playing = (video.srcObject !== null) && !video.paused;

View File

@@ -5,6 +5,7 @@
 // module imports
 import { log, now, mergeDeep, validate } from './util/util';
 import { defaults } from './config';
+import { env, Env } from './util/env';
 import * as tf from '../dist/tfjs.esm.js';
 import * as app from '../package.json';
 import * as backend from './tfjs/backend';
@@ -12,7 +13,6 @@ import * as blazepose from './body/blazepose';
 import * as centernet from './object/centernet';
 import * as draw from './util/draw';
 import * as efficientpose from './body/efficientpose';
-import * as env from './util/env';
 import * as face from './face/face';
 import * as facemesh from './face/facemesh';
 import * as faceres from './face/faceres';
@@ -125,7 +125,7 @@ export class Human {
   tf: TensorFlow;
   /** Object containing environment information used for diagnostics */
-  env: env.Env;
+  env: Env;
   /** Draw helper classes that can draw detected objects on canvas using specified draw
    * - options: {@link DrawOptions} global settings for all draw operations, can be overriden for each draw method
@@ -174,11 +174,10 @@ export class Human {
   * @return instance: {@link Human}
   */
  constructor(userConfig?: Partial<Config>) {
-    env.get();
-    this.env = env.env;
+    this.env = env;
     defaults.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf.version_core}/dist/`;
-    defaults.modelBasePath = this.env.browser ? '../models/' : 'file://models/';
-    defaults.backend = this.env.browser ? 'humangl' : 'tensorflow';
+    defaults.modelBasePath = env.browser ? '../models/' : 'file://models/';
+    defaults.backend = env.browser ? 'humangl' : 'tensorflow';
     this.version = app.version; // expose version property on instance of class
     Object.defineProperty(this, 'version', { value: app.version }); // expose version property directly on class itself
     this.config = JSON.parse(JSON.stringify(defaults));
@@ -311,7 +310,6 @@ export class Human {
   async init(): Promise<void> {
     await backend.check(this, true);
     await this.tf.ready();
-    env.set(this.env);
   }
   /** Load method preloads all configured models on-demand
@@ -326,7 +324,7 @@ export class Human {
     const count = Object.values(this.models).filter((model) => model).length;
     if (userConfig) this.config = mergeDeep(this.config, userConfig) as Config;
-    if (env.env.initial) { // print version info on first run and check for correct backend setup
+    if (env.initial) { // print version info on first run and check for correct backend setup
       if (this.config.debug) log(`version: ${this.version}`);
       if (this.config.debug) log(`tfjs version: ${this.tf.version_core}`);
       if (!await backend.check(this)) log('error: backend check failed');
@@ -338,8 +336,8 @@ export class Human {
     }
     await models.load(this); // actually loads models
-    if (env.env.initial && this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors'); // print memory stats on first run
-    env.env.initial = false;
+    if (env.initial && this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors'); // print memory stats on first run
+    env.initial = false;
     const loaded = Object.values(this.models).filter((model) => model).length;
     if (loaded !== count) { // number of loaded models changed

View File

@@ -10,16 +10,17 @@ import { env } from '../util/env';
 import { log, now } from '../util/util';
 export type Input = Tensor | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas | typeof Image | typeof env.Canvas;
+export type AnyCanvas = HTMLCanvasElement | OffscreenCanvas;
 const maxSize = 2048;
 // internal temp canvases
-let inCanvas: HTMLCanvasElement | OffscreenCanvas | null = null; // use global variable to avoid recreating canvas on each frame
-let outCanvas: HTMLCanvasElement | OffscreenCanvas | null = null; // use global variable to avoid recreating canvas on each frame
-let tmpCanvas: HTMLCanvasElement | OffscreenCanvas | null = null; // use global variable to avoid recreating canvas on each frame
+let inCanvas: AnyCanvas | null = null; // use global variable to avoid recreating canvas on each frame
+let outCanvas: AnyCanvas | null = null; // use global variable to avoid recreating canvas on each frame
+let tmpCanvas: AnyCanvas | null = null; // use global variable to avoid recreating canvas on each frame
 // @ts-ignore // imagefx is js module that should be converted to a class
 let fx: fxImage.GLImageFilter | null; // instance of imagefx
-export function canvas(width, height): HTMLCanvasElement | OffscreenCanvas {
+export function canvas(width, height): AnyCanvas {
   let c;
   if (env.browser) {
     if (env.offscreen) {
@@ -39,7 +40,7 @@ export function canvas(width, height): HTMLCanvasElement | OffscreenCanvas {
   return c;
 }
-export function copy(input: HTMLCanvasElement | OffscreenCanvas, output?: HTMLCanvasElement | OffscreenCanvas) {
+export function copy(input: AnyCanvas, output?: AnyCanvas) {
   const outputCanvas = output || canvas(input.width, input.height);
   const ctx = outputCanvas.getContext('2d') as CanvasRenderingContext2D;
   ctx.drawImage(input, 0, 0);
@@ -49,7 +50,7 @@ export function copy(input: HTMLCanvasElement | OffscreenCanvas, output?: HTMLCa
 // process input image and return tensor
 // input can be tensor, imagedata, htmlimageelement, htmlvideoelement
 // input is resized and run through imagefx filter
-export function process(input: Input, config: Config, getTensor: boolean = true): { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement | null } {
+export function process(input: Input, config: Config, getTensor: boolean = true): { tensor: Tensor | null, canvas: AnyCanvas | null } {
   if (!input) {
     // throw new Error('input is missing');
     if (config.debug) log('input is missing');
@@ -119,10 +120,10 @@ export function process(input: Input, config: Config, getTensor: boolean = true)
     if (config.filter.flip && typeof inCtx.translate !== 'undefined') {
       inCtx.translate(originalWidth, 0);
       inCtx.scale(-1, 1);
-      inCtx.drawImage(input as OffscreenCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
+      inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
       inCtx.setTransform(1, 0, 0, 1, 0, 0); // resets transforms to defaults
     } else {
-      inCtx.drawImage(input as OffscreenCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
+      inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
     }
   }
@@ -130,7 +131,7 @@ export function process(input: Input, config: Config, getTensor: boolean = true)
   // imagefx transforms using gl from input canvas to output canvas
   if (config.filter.enabled && env.webgl.supported) {
-    if (!fx) fx = env.browser ? new fxImage.GLImageFilter({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
+    if (!fx) fx = env.browser ? new fxImage.GLImageFilter() : null; // && (typeof document !== 'undefined')
     env.filter = !!fx;
     if (!fx) return { tensor: null, canvas: inCanvas };
     fx.reset();

View File

@@ -4,6 +4,7 @@
 */
 import * as shaders from './imagefxshaders';
+import { canvas } from './image';
 const collect = (source, prefix, collection) => {
   const r = new RegExp('\\b' + prefix + ' \\w+ (\\w+)', 'ig');
@@ -51,9 +52,9 @@ class GLProgram {
  * @property {function} add add specified filter to filter chain
  * @property {function} apply execute filter chain and draw result
  * @property {function} draw just draw input to result
- * @param {HTMLCanvasElement | OffscreenCanvas} canvas use specific canvas for all webgl bindings
  */
-export function GLImageFilter(params = {}) {
+export function GLImageFilter() {
   let drawCount = 0;
   let sourceTexture: WebGLTexture | null = null;
   let lastInChain = false;
@@ -62,16 +63,16 @@ export function GLImageFilter(params = {})
   let filterChain: Record<string, unknown>[] = [];
   let vertexBuffer: WebGLBuffer | null = null;
   let currentProgram: GLProgram | null = null;
-  const canvas = params['canvas'] || typeof OffscreenCanvas !== 'undefined' ? new OffscreenCanvas(100, 100) : document.createElement('canvas');
+  const fxcanvas = canvas(100, 100);
   const shaderProgramCache = { }; // key is the shader program source, value is the compiled program
   const DRAW = { INTERMEDIATE: 1 };
-  const gl = canvas.getContext('webgl') as WebGLRenderingContext;
+  const gl = fxcanvas.getContext('webgl') as WebGLRenderingContext;
   if (!gl) throw new Error('filter: cannot get webgl context');
   function resize(width, height) {
-    if (width === canvas.width && height === canvas.height) return; // Same width/height? Nothing to do here
-    canvas.width = width;
-    canvas.height = height;
+    if (width === fxcanvas.width && height === fxcanvas.height) return; // Same width/height? Nothing to do here
+    fxcanvas.width = width;
+    fxcanvas.height = height;
     if (!vertexBuffer) { // Create the context if we don't have it yet
       const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]); // Create the vertex buffer for the two triangles [x, y, u, v] * 6
       vertexBuffer = gl.createBuffer();
@@ -79,7 +80,7 @@ export function GLImageFilter(params = {})
       gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
       gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
     }
-    gl.viewport(0, 0, canvas.width, canvas.height);
+    gl.viewport(0, 0, fxcanvas.width, fxcanvas.height);
     tempFramebuffers = [null, null]; // Delete old temp framebuffers
   }
@@ -102,7 +103,7 @@ export function GLImageFilter(params = {})
   }
   function getTempFramebuffer(index) {
-    tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(canvas.width, canvas.height);
+    tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(fxcanvas.width, fxcanvas.height);
     return tempFramebuffers[index];
   }
@@ -288,8 +289,8 @@ export function GLImageFilter(params = {})
    convolution: (matrix) => { // general convolution Filter
      const m = new Float32Array(matrix);
-     const pixelSizeX = 1 / canvas.width;
-     const pixelSizeY = 1 / canvas.height;
+     const pixelSizeX = 1 / fxcanvas.width;
+     const pixelSizeY = 1 / fxcanvas.height;
      const program = compileShader(shaders.convolution);
      gl.uniform1fv(program?.uniform['m'], m);
      gl.uniform2f(program?.uniform['px'], pixelSizeX, pixelSizeY);
@@ -344,8 +345,8 @@ export function GLImageFilter(params = {})
    },
    blur: (size) => {
-     const blurSizeX = (size / 7) / canvas.width;
-     const blurSizeY = (size / 7) / canvas.height;
+     const blurSizeX = (size / 7) / fxcanvas.width;
+     const blurSizeY = (size / 7) / fxcanvas.height;
      const program = compileShader(shaders.blur);
      // Vertical
      gl.uniform2f(program?.uniform['px'], 0, blurSizeY);
@@ -356,8 +357,8 @@ export function GLImageFilter(params = {})
    },
    pixelate: (size) => {
-     const blurSizeX = (size) / canvas.width;
-     const blurSizeY = (size) / canvas.height;
+     const blurSizeX = (size) / fxcanvas.width;
+     const blurSizeY = (size) / fxcanvas.height;
      const program = compileShader(shaders.pixelate);
      gl.uniform2f(program?.uniform['size'], blurSizeX, blurSizeY);
      draw();
@@ -399,7 +400,7 @@ export function GLImageFilter(params = {})
      // @ts-ignore function assigment
      f.func.apply(this, f.args || []);
    }
-   return canvas;
+   return fxcanvas;
  };
 // @ts-ignore this

View File

@@ -1,13 +1,13 @@
 /** TFJS backend initialization and customization */
 import { log, now } from '../util/util';
+import { env } from '../util/env';
 import * as humangl from './humangl';
-import * as env from '../util/env';
 import * as tf from '../../dist/tfjs.esm.js';
 export async function check(instance, force = false) {
   instance.state = 'backend';
-  if (force || env.env.initial || (instance.config.backend && (instance.config.backend.length > 0) && (tf.getBackend() !== instance.config.backend))) {
+  if (force || env.initial || (instance.config.backend && (instance.config.backend.length > 0) && (tf.getBackend() !== instance.config.backend))) {
     const timeStamp = now();
     if (instance.config.backend && instance.config.backend.length > 0) {
@@ -18,17 +18,17 @@ export async function check(instance, force = false) {
     }
     // force browser vs node backend
-    if (env.env.browser && instance.config.backend === 'tensorflow') {
+    if (env.browser && instance.config.backend === 'tensorflow') {
       if (instance.config.debug) log('override: backend set to tensorflow while running in browser');
       instance.config.backend = 'humangl';
     }
-    if (env.env.node && (instance.config.backend === 'webgl' || instance.config.backend === 'humangl')) {
+    if (env.node && (instance.config.backend === 'webgl' || instance.config.backend === 'humangl')) {
       if (instance.config.debug) log(`override: backend set to ${instance.config.backend} while running in nodejs`);
       instance.config.backend = 'tensorflow';
     }
     // handle webgpu
-    if (env.env.browser && instance.config.backend === 'webgpu') {
+    if (env.browser && instance.config.backend === 'webgpu') {
       if (typeof navigator === 'undefined' || typeof navigator['gpu'] === 'undefined') {
         log('override: backend set to webgpu but browser does not support webgpu');
         instance.config.backend = 'humangl';
@@ -45,7 +45,7 @@ export async function check(instance, force = false) {
     if (!available.includes(instance.config.backend)) {
       log(`error: backend ${instance.config.backend} not found in registry`);
-      instance.config.backend = env.env.node ? 'tensorflow' : 'webgl';
+      instance.config.backend = env.node ? 'tensorflow' : 'webgl';
       if (instance.config.debug) log(`override: setting backend ${instance.config.backend}`);
     }
@@ -75,7 +75,7 @@ export async function check(instance, force = false) {
     if (tf.getBackend() === 'humangl') {
       tf.ENV.set('CHECK_COMPUTATION_FOR_ERRORS', false);
       tf.ENV.set('WEBGL_CPU_FORWARD', true);
-      tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', false);
+      // tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', false);
       tf.ENV.set('WEBGL_USE_SHAPES_UNIFORMS', true);
       tf.ENV.set('CPU_HANDOFF_SIZE_THRESHOLD', 256);
       // if (!instance.config.object.enabled) tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true); // safe to use 16bit precision
@@ -91,9 +91,9 @@ export async function check(instance, force = false) {
     // customize webgpu
     if (tf.getBackend() === 'webgpu') {
-      tf.ENV.set('WEBGPU_CPU_HANDOFF_SIZE_THRESHOLD', 512);
-      tf.ENV.set('WEBGPU_DEFERRED_SUBMIT_BATCH_SIZE', 0);
-      tf.ENV.set('WEBGPU_CPU_FORWARD', true);
+      // tf.ENV.set('WEBGPU_CPU_HANDOFF_SIZE_THRESHOLD', 512);
+      // tf.ENV.set('WEBGPU_DEFERRED_SUBMIT_BATCH_SIZE', 0);
+      // tf.ENV.set('WEBGPU_CPU_FORWARD', true);
     }
     // wait for ready
@@ -102,8 +102,7 @@ export async function check(instance, force = false) {
     instance.performance.backend = Math.trunc(now() - timeStamp);
     instance.config.backend = tf.getBackend();
-    env.get(); // update env on backend init
-    instance.env = env.env;
+    env.updateBackend(); // update env on backend init
   }
   return true;
 }
@@ -121,5 +120,5 @@ export function fakeOps(kernelNames: Array<string>, config) {
     };
     tf.registerKernel(kernelConfig);
   }
-  env.env.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase()); // re-scan registered ops
+  env.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase()); // re-scan registered ops
 }

View File

@@ -1,100 +1,145 @@
 import * as tf from '../../dist/tfjs.esm.js';
 import * as image from '../image/image';
-import { mergeDeep } from './util';
-export type Env = {
+/** Env class that holds detected capabilities */
+export class Env {
   /** Running in Browser */
-  browser: undefined | boolean,
+  browser: boolean;
   /** Running in NodeJS */
-  node: undefined | boolean,
+  node: boolean;
   /** Running in WebWorker thread */
-  worker: undefined | boolean,
+  worker: boolean;
   /** Detected platform */
-  platform: undefined | string,
+  platform: string = '';
   /** Detected agent */
-  agent: undefined | string,
+  agent: string = '';
   /** List of supported backends */
-  backends: string[],
+  backends: string[] = [];
   /** Has any work been performed so far */
-  initial: boolean,
+  initial: boolean;
   /** Are image filters supported? */
-  filter: undefined | boolean,
+  filter: boolean | undefined;
   /** TFJS instance details */
   tfjs: {
     version: undefined | string,
-  },
+  };
   /** Is offscreenCanvas supported? */
-  offscreen: undefined | boolean,
+  offscreen: undefined | boolean;
   /** WASM detected capabilities */
   wasm: {
     supported: undefined | boolean,
     backend: undefined | boolean,
     simd: undefined | boolean,
     multithread: undefined | boolean,
-  },
+  } = {
+    supported: undefined,
+    backend: undefined,
+    simd: undefined,
+    multithread: undefined,
+  };
   /** WebGL detected capabilities */
   webgl: {
     supported: undefined | boolean,
     backend: undefined | boolean,
     version: undefined | string,
     renderer: undefined | string,
-  },
+  } = {
+    supported: undefined,
+    backend: undefined,
+    version: undefined,
+    renderer: undefined,
+  };
   /** WebGPU detected capabilities */
   webgpu: {
     supported: undefined | boolean,
     backend: undefined | boolean,
     adapter: undefined | string,
-  },
-  /** List of supported kernels for current backend */
-  kernels: string[],
-  /** MonkeyPatch for Canvas */
-  Canvas: undefined,
-  /** MonkeyPatch for Image */
-  Image: undefined,
-  /** MonkeyPatch for ImageData */
-  ImageData: undefined,
-}
-// eslint-disable-next-line import/no-mutable-exports
-export let env: Env = {
-  browser: undefined,
-  node: undefined,
-  worker: undefined,
-  platform: undefined,
-  agent: undefined,
-  initial: true,
-  backends: [],
-  offscreen: undefined,
-  filter: undefined,
-  tfjs: {
-    version: undefined,
-  },
-  wasm: {
-    supported: undefined,
-    backend: undefined,
-    simd: undefined,
-    multithread: undefined,
-  },
-  webgl: {
-    supported: undefined,
-    backend: undefined,
-    version: undefined,
-    renderer: undefined,
-  },
-  webgpu: {
-    supported: undefined,
-    backend: undefined,
-    adapter: undefined,
-  },
-  kernels: [],
-  Canvas: undefined,
-  Image: undefined,
-  ImageData: undefined,
-};
-export async function cpuInfo() {
+  } = {
+    supported: undefined,
+    backend: undefined,
+    adapter: undefined,
+  };
+  /** CPU info */
+  cpu: {
+    model: undefined | string,
+    flags: string[],
+  } = {
+    model: undefined,
+    flags: [],
+  };
+  /** List of supported kernels for current backend */
+  kernels: string[] = [];
+  /** MonkeyPatch for Canvas */
+  Canvas: undefined;
+  /** MonkeyPatch for Image */
+  Image: undefined;
+  /** MonkeyPatch for ImageData */
+  ImageData: undefined;
+  constructor() {
+    this.browser = typeof navigator !== 'undefined';
+    this.node = typeof process !== 'undefined';
+    this.tfjs = { version: tf.version_core };
+    this.offscreen = typeof OffscreenCanvas !== 'undefined';
+    this.initial = true;
+    // @ts-ignore WorkerGlobalScope evaluated in browser only
+    this.worker = this.browser && this.offscreen ? (typeof WorkerGlobalScope !== 'undefined') : undefined;
+    if (typeof navigator !== 'undefined') {
+      const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
+      if (raw && raw[0]) {
+        const platformMatch = raw[0].match(/\(([^()]+)\)/g);
+        this.platform = (platformMatch && platformMatch[0]) ? platformMatch[0].replace(/\(|\)/g, '') : '';
+        this.agent = navigator.userAgent.replace(raw[0], '');
+        if (this.platform[1]) this.agent = this.agent.replace(raw[1], '');
+        this.agent = this.agent.replace(/  /g, ' ');
+        // chrome offscreencanvas gpu memory leak
+        /*
+        const isChrome = env.agent.match(/Chrome\/.[0-9]/g);
+        const verChrome = isChrome && isChrome[0] ? isChrome[0].split('/')[1] : 0;
+        if (verChrome > 92 && verChrome < 96) {
+          log('disabling offscreenCanvas due to browser error:', isChrome ? isChrome[0] : 'unknown');
+          this.offscreen = false;
+        }
+        */
+      }
+    } else if (typeof process !== 'undefined') {
+      this.platform = `${process.platform} ${process.arch}`;
+      this.agent = `NodeJS ${process.version}`;
+    }
+  }
+  async updateBackend() {
+    // analyze backends
+    this.backends = Object.keys(tf.engine().registryFactory);
+    this.wasm.supported = typeof WebAssembly !== 'undefined';
+    this.wasm.backend = this.backends.includes('wasm');
+    if (this.wasm.supported && this.wasm.backend && tf.getBackend() === 'wasm') {
+      this.wasm.simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');
+      this.wasm.multithread = await tf.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT');
+    }
+    const c = image.canvas(100, 100);
+    const ctx = c ? c.getContext('webgl2') : undefined; // causes too many gl contexts
+    // const ctx = typeof tf.backend().getGPGPUContext !== undefined ? tf.backend().getGPGPUContext : null;
+    this.webgl.supported = typeof ctx !== 'undefined';
+    this.webgl.backend = this.backends.includes('webgl');
+    if (this.webgl.supported && this.webgl.backend && (tf.getBackend() === 'webgl' || tf.getBackend() === 'humangl')) {
+      // @ts-ignore getGPGPUContext only exists on WebGL backend
+      const gl = tf.backend().gpgpu !== 'undefined' ? await tf.backend().getGPGPUContext().gl : null;
+      if (gl) {
+        this.webgl.version = gl.getParameter(gl.VERSION);
+        this.webgl.renderer = gl.getParameter(gl.RENDERER);
+      }
+    }
+    this.webgpu.supported = this.browser && typeof navigator['gpu'] !== 'undefined';
+    this.webgpu.backend = this.backends.includes('webgpu');
+    if (this.webgpu.supported) this.webgpu.adapter = (await navigator['gpu'].requestAdapter())?.name;
+    // enumerate kernels
+    this.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
+  }
+  async updateCPU() {
     const cpu = { model: '', flags: [] };
-  if (env.node && env.platform?.startsWith('linux')) {
+    if (this.node && this.platform?.startsWith('linux')) {
       // eslint-disable-next-line global-require
       const fs = require('fs');
       try {
@@ -109,81 +154,9 @@ export async function cpuInfo() {
       }
     } catch { /**/ }
   }
-  if (!env['cpu']) Object.defineProperty(env, 'cpu', { value: cpu });
-  else env['cpu'] = cpu;
-}
-export async function backendInfo() {
-  // analyze backends
-  env.backends = Object.keys(tf.engine().registryFactory);
-  env.wasm.supported = typeof WebAssembly !== 'undefined';
-  env.wasm.backend = env.backends.includes('wasm');
-  if (env.wasm.supported && env.wasm.backend && tf.getBackend() === 'wasm') {
-    env.wasm.simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');
-    env.wasm.multithread = await tf.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT');
-  }
-  const c = image.canvas(100, 100);
-  const ctx = c ? c.getContext('webgl2') : undefined; // causes too many gl contexts
-  // const ctx = typeof tf.backend().getGPGPUContext !== undefined ? tf.backend().getGPGPUContext : null;
-  env.webgl.supported = typeof ctx !== 'undefined';
-  env.webgl.backend = env.backends.includes('webgl');
-  if (env.webgl.supported && env.webgl.backend && (tf.getBackend() === 'webgl' || tf.getBackend() === 'humangl')) {
-    // @ts-ignore getGPGPUContext only exists on WebGL backend
-    const gl = tf.backend().gpgpu !== 'undefined' ? await tf.backend().getGPGPUContext().gl : null;
-    if (gl) {
-      env.webgl.version = gl.getParameter(gl.VERSION);
-      env.webgl.renderer = gl.getParameter(gl.RENDERER);
-    }
-  }
-  env.webgpu.supported = env.browser && typeof navigator['gpu'] !== 'undefined';
-  env.webgpu.backend = env.backends.includes('webgpu');
-  if (env.webgpu.supported) env.webgpu.adapter = (await navigator['gpu'].requestAdapter())?.name;
-  // enumerate kernels
-  env.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
-}
-export async function get() {
-  env.browser = typeof navigator !== 'undefined';
-  env.node = typeof process !== 'undefined';
-  env.tfjs.version = tf.version_core;
-  // offscreencanvas supported?
-  env.offscreen = typeof env.offscreen === 'undefined' ? typeof OffscreenCanvas !== 'undefined' : env.offscreen;
-  // get platform and agent
-  if (typeof navigator !== 'undefined') {
-    const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
-    if (raw && raw[0]) {
-      const platformMatch = raw[0].match(/\(([^()]+)\)/g);
-      env.platform = (platformMatch && platformMatch[0]) ? platformMatch[0].replace(/\(|\)/g, '') : '';
-      env.agent = navigator.userAgent.replace(raw[0], '');
-      if (env.platform[1]) env.agent = env.agent.replace(raw[1], '');
-      env.agent = env.agent.replace(/  /g, ' ');
-      // chrome offscreencanvas gpu memory leak
-      /*
-      const isChrome = env.agent.match(/Chrome\/.[0-9]/g);
-      const verChrome = isChrome && isChrome[0] ? isChrome[0].split('/')[1] : 0;
-      if (verChrome > 0 && verChrome > 92 && verChrome < 96) {
-        log('disabling offscreenCanvas due to browser error:', isChrome ? isChrome[0] : 'unknown');
-        env.offscreen = false;
-      }
-      */
-    }
-  } else if (typeof process !== 'undefined') {
-    env.platform = `${process.platform} ${process.arch}`;
-    env.agent = `NodeJS ${process.version}`;
-  }
-  // @ts-ignore WorkerGlobalScope evaluated in browser only
-  env.worker = env.browser && env.offscreen ? (typeof WorkerGlobalScope !== 'undefined') : undefined;
-  await backendInfo();
-  // get cpu info
-  // await cpuInfo();
-}
-export async function set(obj) {
-  env = mergeDeep(env, obj);
-}
+    if (!this['cpu']) Object.defineProperty(this, 'cpu', { value: cpu });
+    else this['cpu'] = cpu;
+  }
+}
+export const env = new Env();
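Since the Human constructor now assigns the shared singleton directly (this.env = env) and init() runs backend.check(), which in turn calls env.updateBackend(), an instance consumer reads the same object the library updates internally. A hedged usage sketch; the package import form and the human variable are illustrative, not part of this diff:

import { Human } from '@vladmandic/human'; // import path assumed
const human = new Human();
await human.init();                         // backend.check() calls env.updateBackend()
console.log(human.env.backends, human.env.webgl.supported, human.env.kernels.length);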

View File

@@ -3,4 +3,11 @@
  * @external
  */
+import * as tf from '../../tfjs/dist/tfjs.esm';
+// eslint-disable-next-line import/export
 export * from '../../tfjs/dist/tfjs.esm';
+// needs override
+// eslint-disable-next-line import/export
+export const version_core = tf.version['tfjs-core'];