mirror of https://github.com/vladmandic/human
refactor human.env to a class type
parent 962ef18e1c
commit 37672d6460
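The change replaces the mutable env object in src/util/env.ts (populated by get() and merged via set()) with an Env class that performs static detection in its constructor, exposes updateBackend() and updateCPU() methods, and is exported as a single shared instance. A minimal sketch of the new shape, condensed from the full diff below (illustrative only, not part of the patch):

import * as tf from '../../dist/tfjs.esm.js';

export class Env {
  browser: boolean;
  node: boolean;
  initial: boolean;
  constructor() {
    // static detection runs once when the instance is created
    this.browser = typeof navigator !== 'undefined';
    this.node = typeof process !== 'undefined';
    this.initial = true;
  }
  async updateBackend() {
    // backend-dependent probes (wasm/webgl/webgpu/kernels) run after tfjs is ready
  }
  async updateCPU() {
    // cpu model/flags read from /proc/cpuinfo when running in nodejs on linux
  }
}

export const env = new Env(); // shared instance consumed by human.ts, backend.ts and image.ts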
@@ -89,7 +89,7 @@ const ui = {
   autoPlay: false, // start webcam & detection on load
 
   // internal variables
-  exceptionHandler: true, // should capture all unhandled exceptions
+  exceptionHandler: false, // should capture all unhandled exceptions
   busy: false, // internal camera busy flag
   menuWidth: 0, // internal
   menuHeight: 0, // internal
@@ -168,13 +168,15 @@ function log(...msg) {
   if (ui.console) console.log(ts, ...msg);
 }
 
+let prevStatus = '';
 function status(msg) {
   const div = document.getElementById('status');
-  if (div && msg && msg.length > 0) {
+  if (div && msg && msg !== prevStatus && msg.length > 0) {
     log('status', msg);
     document.getElementById('play').style.display = 'none';
     document.getElementById('loader').style.display = 'block';
     div.innerText = msg;
+    prevStatus = msg;
   } else {
     const video = document.getElementById('video');
     const playing = (video.srcObject !== null) && !video.paused;
18  src/human.ts
@@ -5,6 +5,7 @@
 // module imports
 import { log, now, mergeDeep, validate } from './util/util';
 import { defaults } from './config';
+import { env, Env } from './util/env';
 import * as tf from '../dist/tfjs.esm.js';
 import * as app from '../package.json';
 import * as backend from './tfjs/backend';
@@ -12,7 +13,6 @@ import * as blazepose from './body/blazepose';
 import * as centernet from './object/centernet';
 import * as draw from './util/draw';
 import * as efficientpose from './body/efficientpose';
-import * as env from './util/env';
 import * as face from './face/face';
 import * as facemesh from './face/facemesh';
 import * as faceres from './face/faceres';
@@ -125,7 +125,7 @@ export class Human {
   tf: TensorFlow;
 
   /** Object containing environment information used for diagnostics */
-  env: env.Env;
+  env: Env;
 
   /** Draw helper classes that can draw detected objects on canvas using specified draw
    * - options: {@link DrawOptions} global settings for all draw operations, can be overriden for each draw method
@@ -174,11 +174,10 @@ export class Human {
    * @return instance: {@link Human}
    */
   constructor(userConfig?: Partial<Config>) {
-    env.get();
-    this.env = env.env;
+    this.env = env;
     defaults.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf.version_core}/dist/`;
-    defaults.modelBasePath = this.env.browser ? '../models/' : 'file://models/';
-    defaults.backend = this.env.browser ? 'humangl' : 'tensorflow';
+    defaults.modelBasePath = env.browser ? '../models/' : 'file://models/';
+    defaults.backend = env.browser ? 'humangl' : 'tensorflow';
     this.version = app.version; // expose version property on instance of class
     Object.defineProperty(this, 'version', { value: app.version }); // expose version property directly on class itself
     this.config = JSON.parse(JSON.stringify(defaults));
@@ -311,7 +310,6 @@ export class Human {
   async init(): Promise<void> {
     await backend.check(this, true);
     await this.tf.ready();
-    env.set(this.env);
   }
 
   /** Load method preloads all configured models on-demand
@@ -326,7 +324,7 @@ export class Human {
     const count = Object.values(this.models).filter((model) => model).length;
     if (userConfig) this.config = mergeDeep(this.config, userConfig) as Config;
 
-    if (env.env.initial) { // print version info on first run and check for correct backend setup
+    if (env.initial) { // print version info on first run and check for correct backend setup
       if (this.config.debug) log(`version: ${this.version}`);
       if (this.config.debug) log(`tfjs version: ${this.tf.version_core}`);
       if (!await backend.check(this)) log('error: backend check failed');
@@ -338,8 +336,8 @@ export class Human {
     }
 
     await models.load(this); // actually loads models
-    if (env.env.initial && this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors'); // print memory stats on first run
-    env.env.initial = false;
+    if (env.initial && this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors'); // print memory stats on first run
+    env.initial = false;
 
     const loaded = Object.values(this.models).filter((model) => model).length;
     if (loaded !== count) { // number of loaded models changed
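Net effect on the Human class: human.env now points at the shared Env instance and flags are read directly from env rather than env.env. An illustrative usage sketch (package import path assumed, not part of this diff):

import { Human } from '@vladmandic/human'; // import path assumed for illustration

async function main() {
  const human = new Human();
  await human.load();                                  // on first run env.initial gates the version logging, then is cleared
  console.log(human.env.browser, human.env.backends);  // capability flags live on the shared Env instance
}
main();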
@@ -10,16 +10,17 @@ import { env } from '../util/env';
 import { log, now } from '../util/util';
 
 export type Input = Tensor | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas | typeof Image | typeof env.Canvas;
+export type AnyCanvas = HTMLCanvasElement | OffscreenCanvas;
 
 const maxSize = 2048;
 // internal temp canvases
-let inCanvas: HTMLCanvasElement | OffscreenCanvas | null = null; // use global variable to avoid recreating canvas on each frame
-let outCanvas: HTMLCanvasElement | OffscreenCanvas | null = null; // use global variable to avoid recreating canvas on each frame
-let tmpCanvas: HTMLCanvasElement | OffscreenCanvas | null = null; // use global variable to avoid recreating canvas on each frame
+let inCanvas: AnyCanvas | null = null; // use global variable to avoid recreating canvas on each frame
+let outCanvas: AnyCanvas | null = null; // use global variable to avoid recreating canvas on each frame
+let tmpCanvas: AnyCanvas | null = null; // use global variable to avoid recreating canvas on each frame
 // @ts-ignore // imagefx is js module that should be converted to a class
 let fx: fxImage.GLImageFilter | null; // instance of imagefx
 
-export function canvas(width, height): HTMLCanvasElement | OffscreenCanvas {
+export function canvas(width, height): AnyCanvas {
   let c;
   if (env.browser) {
     if (env.offscreen) {
@@ -39,7 +40,7 @@ export function canvas(width, height): HTMLCanvasElement | OffscreenCanvas {
   return c;
 }
 
-export function copy(input: HTMLCanvasElement | OffscreenCanvas, output?: HTMLCanvasElement | OffscreenCanvas) {
+export function copy(input: AnyCanvas, output?: AnyCanvas) {
   const outputCanvas = output || canvas(input.width, input.height);
   const ctx = outputCanvas.getContext('2d') as CanvasRenderingContext2D;
   ctx.drawImage(input, 0, 0);
@@ -49,7 +50,7 @@ export function copy(input: HTMLCanvasElement | OffscreenCanvas, output?: HTMLCa
 // process input image and return tensor
 // input can be tensor, imagedata, htmlimageelement, htmlvideoelement
 // input is resized and run through imagefx filter
-export function process(input: Input, config: Config, getTensor: boolean = true): { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement | null } {
+export function process(input: Input, config: Config, getTensor: boolean = true): { tensor: Tensor | null, canvas: AnyCanvas | null } {
   if (!input) {
     // throw new Error('input is missing');
     if (config.debug) log('input is missing');
@@ -119,10 +120,10 @@ export function process(input: Input, config: Config, getTensor: boolean = true)
     if (config.filter.flip && typeof inCtx.translate !== 'undefined') {
       inCtx.translate(originalWidth, 0);
       inCtx.scale(-1, 1);
-      inCtx.drawImage(input as OffscreenCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
+      inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
       inCtx.setTransform(1, 0, 0, 1, 0, 0); // resets transforms to defaults
     } else {
-      inCtx.drawImage(input as OffscreenCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
+      inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
     }
   }
 
@@ -130,7 +131,7 @@ export function process(input: Input, config: Config, getTensor: boolean = true)
 
   // imagefx transforms using gl from input canvas to output canvas
   if (config.filter.enabled && env.webgl.supported) {
-    if (!fx) fx = env.browser ? new fxImage.GLImageFilter({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
+    if (!fx) fx = env.browser ? new fxImage.GLImageFilter() : null; // && (typeof document !== 'undefined')
    env.filter = !!fx;
     if (!fx) return { tensor: null, canvas: inCanvas };
     fx.reset();
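The image module changes are mostly mechanical: the repeated HTMLCanvasElement | OffscreenCanvas union collapses into the exported AnyCanvas alias, and GLImageFilter is now constructed without arguments. Illustrative use of the alias (relative import path assumed, not part of this diff):

import { canvas, copy } from './image/image';
import type { AnyCanvas } from './image/image';

const src: AnyCanvas = canvas(640, 480); // OffscreenCanvas where supported, HTMLCanvasElement otherwise
const dst: AnyCanvas = canvas(640, 480);
copy(src, dst);                          // both parameters are typed as AnyCanvas after this change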
@@ -4,6 +4,7 @@
  */
 
 import * as shaders from './imagefxshaders';
+import { canvas } from './image';
 
 const collect = (source, prefix, collection) => {
   const r = new RegExp('\\b' + prefix + ' \\w+ (\\w+)', 'ig');
@@ -51,9 +52,9 @@ class GLProgram {
  * @property {function} add add specified filter to filter chain
  * @property {function} apply execute filter chain and draw result
  * @property {function} draw just draw input to result
- * @param {HTMLCanvasElement | OffscreenCanvas} canvas use specific canvas for all webgl bindings
  */
-export function GLImageFilter(params = {}) {
+
+export function GLImageFilter() {
   let drawCount = 0;
   let sourceTexture: WebGLTexture | null = null;
   let lastInChain = false;
@@ -62,16 +63,16 @@ export function GLImageFilter(params = {}) {
   let filterChain: Record<string, unknown>[] = [];
   let vertexBuffer: WebGLBuffer | null = null;
   let currentProgram: GLProgram | null = null;
-  const canvas = params['canvas'] || typeof OffscreenCanvas !== 'undefined' ? new OffscreenCanvas(100, 100) : document.createElement('canvas');
+  const fxcanvas = canvas(100, 100);
   const shaderProgramCache = { }; // key is the shader program source, value is the compiled program
   const DRAW = { INTERMEDIATE: 1 };
-  const gl = canvas.getContext('webgl') as WebGLRenderingContext;
+  const gl = fxcanvas.getContext('webgl') as WebGLRenderingContext;
   if (!gl) throw new Error('filter: cannot get webgl context');
 
   function resize(width, height) {
-    if (width === canvas.width && height === canvas.height) return; // Same width/height? Nothing to do here
-    canvas.width = width;
-    canvas.height = height;
+    if (width === fxcanvas.width && height === fxcanvas.height) return; // Same width/height? Nothing to do here
+    fxcanvas.width = width;
+    fxcanvas.height = height;
     if (!vertexBuffer) { // Create the context if we don't have it yet
       const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]); // Create the vertex buffer for the two triangles [x, y, u, v] * 6
       vertexBuffer = gl.createBuffer();
@@ -79,7 +80,7 @@ export function GLImageFilter(params = {}) {
       gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
       gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
     }
-    gl.viewport(0, 0, canvas.width, canvas.height);
+    gl.viewport(0, 0, fxcanvas.width, fxcanvas.height);
     tempFramebuffers = [null, null]; // Delete old temp framebuffers
   }
 
@@ -102,7 +103,7 @@ export function GLImageFilter(params = {}) {
   }
 
   function getTempFramebuffer(index) {
-    tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(canvas.width, canvas.height);
+    tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(fxcanvas.width, fxcanvas.height);
     return tempFramebuffers[index];
   }
 
@@ -288,8 +289,8 @@ export function GLImageFilter(params = {}) {
 
     convolution: (matrix) => { // general convolution Filter
       const m = new Float32Array(matrix);
-      const pixelSizeX = 1 / canvas.width;
-      const pixelSizeY = 1 / canvas.height;
+      const pixelSizeX = 1 / fxcanvas.width;
+      const pixelSizeY = 1 / fxcanvas.height;
       const program = compileShader(shaders.convolution);
       gl.uniform1fv(program?.uniform['m'], m);
       gl.uniform2f(program?.uniform['px'], pixelSizeX, pixelSizeY);
@@ -344,8 +345,8 @@ export function GLImageFilter(params = {}) {
     },
 
     blur: (size) => {
-      const blurSizeX = (size / 7) / canvas.width;
-      const blurSizeY = (size / 7) / canvas.height;
+      const blurSizeX = (size / 7) / fxcanvas.width;
+      const blurSizeY = (size / 7) / fxcanvas.height;
       const program = compileShader(shaders.blur);
       // Vertical
       gl.uniform2f(program?.uniform['px'], 0, blurSizeY);
@@ -356,8 +357,8 @@ export function GLImageFilter(params = {}) {
     },
 
     pixelate: (size) => {
-      const blurSizeX = (size) / canvas.width;
-      const blurSizeY = (size) / canvas.height;
+      const blurSizeX = (size) / fxcanvas.width;
+      const blurSizeY = (size) / fxcanvas.height;
       const program = compileShader(shaders.pixelate);
       gl.uniform2f(program?.uniform['size'], blurSizeX, blurSizeY);
       draw();
@@ -399,7 +400,7 @@ export function GLImageFilter(params = {}) {
       // @ts-ignore function assigment
       f.func.apply(this, f.args || []);
     }
-    return canvas;
+    return fxcanvas;
   };
 
   // @ts-ignore this
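GLImageFilter no longer accepts a params object with a caller-provided canvas; it always creates its own working canvas (fxcanvas) through the shared canvas() helper imported from image.ts, so construction takes no arguments. Sketch only, import path assumed:

import * as fxImage from './image/imagefx';

const fx = new fxImage.GLImageFilter(); // was: new GLImageFilter({ canvas: outCanvas })
fx.reset();                             // the filter-chain methods (add/apply/draw) are unchanged by this commit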
@@ -1,13 +1,13 @@
 /** TFJS backend initialization and customization */
 
 import { log, now } from '../util/util';
+import { env } from '../util/env';
 import * as humangl from './humangl';
-import * as env from '../util/env';
 import * as tf from '../../dist/tfjs.esm.js';
 
 export async function check(instance, force = false) {
   instance.state = 'backend';
-  if (force || env.env.initial || (instance.config.backend && (instance.config.backend.length > 0) && (tf.getBackend() !== instance.config.backend))) {
+  if (force || env.initial || (instance.config.backend && (instance.config.backend.length > 0) && (tf.getBackend() !== instance.config.backend))) {
     const timeStamp = now();
 
     if (instance.config.backend && instance.config.backend.length > 0) {
@@ -18,17 +18,17 @@ export async function check(instance, force = false) {
     }
 
     // force browser vs node backend
-    if (env.env.browser && instance.config.backend === 'tensorflow') {
+    if (env.browser && instance.config.backend === 'tensorflow') {
       if (instance.config.debug) log('override: backend set to tensorflow while running in browser');
       instance.config.backend = 'humangl';
     }
-    if (env.env.node && (instance.config.backend === 'webgl' || instance.config.backend === 'humangl')) {
+    if (env.node && (instance.config.backend === 'webgl' || instance.config.backend === 'humangl')) {
       if (instance.config.debug) log(`override: backend set to ${instance.config.backend} while running in nodejs`);
       instance.config.backend = 'tensorflow';
     }
 
     // handle webgpu
-    if (env.env.browser && instance.config.backend === 'webgpu') {
+    if (env.browser && instance.config.backend === 'webgpu') {
       if (typeof navigator === 'undefined' || typeof navigator['gpu'] === 'undefined') {
         log('override: backend set to webgpu but browser does not support webgpu');
         instance.config.backend = 'humangl';
@@ -45,7 +45,7 @@ export async function check(instance, force = false) {
 
     if (!available.includes(instance.config.backend)) {
       log(`error: backend ${instance.config.backend} not found in registry`);
-      instance.config.backend = env.env.node ? 'tensorflow' : 'webgl';
+      instance.config.backend = env.node ? 'tensorflow' : 'webgl';
       if (instance.config.debug) log(`override: setting backend ${instance.config.backend}`);
     }
 
@@ -75,7 +75,7 @@ export async function check(instance, force = false) {
     if (tf.getBackend() === 'humangl') {
       tf.ENV.set('CHECK_COMPUTATION_FOR_ERRORS', false);
       tf.ENV.set('WEBGL_CPU_FORWARD', true);
-      tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', false);
+      // tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', false);
       tf.ENV.set('WEBGL_USE_SHAPES_UNIFORMS', true);
       tf.ENV.set('CPU_HANDOFF_SIZE_THRESHOLD', 256);
       // if (!instance.config.object.enabled) tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true); // safe to use 16bit precision
@@ -91,9 +91,9 @@ export async function check(instance, force = false) {
 
     // customize webgpu
     if (tf.getBackend() === 'webgpu') {
-      tf.ENV.set('WEBGPU_CPU_HANDOFF_SIZE_THRESHOLD', 512);
-      tf.ENV.set('WEBGPU_DEFERRED_SUBMIT_BATCH_SIZE', 0);
-      tf.ENV.set('WEBGPU_CPU_FORWARD', true);
+      // tf.ENV.set('WEBGPU_CPU_HANDOFF_SIZE_THRESHOLD', 512);
+      // tf.ENV.set('WEBGPU_DEFERRED_SUBMIT_BATCH_SIZE', 0);
+      // tf.ENV.set('WEBGPU_CPU_FORWARD', true);
     }
 
     // wait for ready
@@ -102,8 +102,7 @@ export async function check(instance, force = false) {
     instance.performance.backend = Math.trunc(now() - timeStamp);
     instance.config.backend = tf.getBackend();
 
-    env.get(); // update env on backend init
-    instance.env = env.env;
+    env.updateBackend(); // update env on backend init
   }
   return true;
 }
@@ -121,5 +120,5 @@ export function fakeOps(kernelNames: Array<string>, config) {
     };
     tf.registerKernel(kernelConfig);
   }
-  env.env.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase()); // re-scan registered ops
+  env.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase()); // re-scan registered ops
 }
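In backend.ts the pattern is the same throughout: flags are read as env.browser, env.node and env.initial instead of env.env.*, and once the backend is selected the code calls env.updateBackend() rather than re-running env.get() and copying the result onto the instance. A simplified sketch of that flow (not the actual check() body; tf.setBackend and tf.ready are standard tfjs calls; paths assumed):

import * as tf from '@tensorflow/tfjs';   // stand-in for '../../dist/tfjs.esm.js'
import { env } from './util/env';

async function backendCheckSketch(instance) {
  if (env.browser && instance.config.backend === 'tensorflow') instance.config.backend = 'humangl'; // node backend requested in a browser
  if (env.node && (instance.config.backend === 'webgl' || instance.config.backend === 'humangl')) instance.config.backend = 'tensorflow'; // browser backend requested in nodejs
  await tf.setBackend(instance.config.backend);
  await tf.ready();
  instance.config.backend = tf.getBackend();
  env.updateBackend(); // was: env.get(); instance.env = env.env;
}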
257  src/util/env.ts
@@ -1,189 +1,162 @@
 import * as tf from '../../dist/tfjs.esm.js';
 import * as image from '../image/image';
 import { mergeDeep } from './util';
 
-export type Env = {
-  /** Running in Browser */
-  browser: undefined | boolean,
-  /** Running in NodeJS */
-  node: undefined | boolean,
-  /** Running in WebWorker thread */
-  worker: undefined | boolean,
-  /** Detected platform */
-  platform: undefined | string,
-  /** Detected agent */
-  agent: undefined | string,
-  /** List of supported backends */
-  backends: string[],
-  /** Has any work been performed so far */
-  initial: boolean,
-  /** Are image filters supported? */
-  filter: undefined | boolean,
-  /** TFJS instance details */
-  tfjs: {
-    version: undefined | string,
-  },
-  /** Is offscreenCanvas supported? */
-  offscreen: undefined | boolean,
-  /** WASM detected capabilities */
-  wasm: {
-    supported: undefined | boolean,
-    backend: undefined | boolean,
-    simd: undefined | boolean,
-    multithread: undefined | boolean,
-  },
-  /** WebGL detected capabilities */
-  webgl: {
-    supported: undefined | boolean,
-    backend: undefined | boolean,
-    version: undefined | string,
-    renderer: undefined | string,
-  },
-  /** WebGPU detected capabilities */
-  webgpu: {
-    supported: undefined | boolean,
-    backend: undefined | boolean,
-    adapter: undefined | string,
-  },
-  /** List of supported kernels for current backend */
-  kernels: string[],
-  /** MonkeyPatch for Canvas */
-  Canvas: undefined,
-  /** MonkeyPatch for Image */
-  Image: undefined,
-  /** MonkeyPatch for ImageData */
-  ImageData: undefined,
-}
-
-// eslint-disable-next-line import/no-mutable-exports
-export let env: Env = {
-  browser: undefined,
-  node: undefined,
-  worker: undefined,
-  platform: undefined,
-  agent: undefined,
-  initial: true,
-  backends: [],
-  offscreen: undefined,
-  filter: undefined,
-  tfjs: {
-    version: undefined,
-  },
-  wasm: {
-    supported: undefined,
-    backend: undefined,
-    simd: undefined,
-    multithread: undefined,
-  },
-  webgl: {
-    supported: undefined,
-    backend: undefined,
-    version: undefined,
-    renderer: undefined,
-  },
-  webgpu: {
-    supported: undefined,
-    backend: undefined,
-    adapter: undefined,
-  },
-  kernels: [],
-  Canvas: undefined,
-  Image: undefined,
-  ImageData: undefined,
-};
-
-export async function cpuInfo() {
-  const cpu = { model: '', flags: [] };
-  if (env.node && env.platform?.startsWith('linux')) {
-    // eslint-disable-next-line global-require
-    const fs = require('fs');
-    try {
-      const data = fs.readFileSync('/proc/cpuinfo').toString();
-      for (const line of data.split('\n')) {
-        if (line.startsWith('model name')) {
-          cpu.model = line.match(/:(.*)/g)[0].replace(':', '').trim();
-        }
-        if (line.startsWith('flags')) {
-          cpu.flags = line.match(/:(.*)/g)[0].replace(':', '').trim().split(' ').sort();
-        }
-      }
-    } catch { /**/ }
-  }
-  if (!env['cpu']) Object.defineProperty(env, 'cpu', { value: cpu });
-  else env['cpu'] = cpu;
-}
-
-export async function backendInfo() {
-  // analyze backends
-  env.backends = Object.keys(tf.engine().registryFactory);
-  env.wasm.supported = typeof WebAssembly !== 'undefined';
-  env.wasm.backend = env.backends.includes('wasm');
-  if (env.wasm.supported && env.wasm.backend && tf.getBackend() === 'wasm') {
-    env.wasm.simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');
-    env.wasm.multithread = await tf.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT');
-  }
-
-  const c = image.canvas(100, 100);
-  const ctx = c ? c.getContext('webgl2') : undefined; // causes too many gl contexts
-  // const ctx = typeof tf.backend().getGPGPUContext !== undefined ? tf.backend().getGPGPUContext : null;
-  env.webgl.supported = typeof ctx !== 'undefined';
-  env.webgl.backend = env.backends.includes('webgl');
-  if (env.webgl.supported && env.webgl.backend && (tf.getBackend() === 'webgl' || tf.getBackend() === 'humangl')) {
-    // @ts-ignore getGPGPUContext only exists on WebGL backend
-    const gl = tf.backend().gpgpu !== 'undefined' ? await tf.backend().getGPGPUContext().gl : null;
-    if (gl) {
-      env.webgl.version = gl.getParameter(gl.VERSION);
-      env.webgl.renderer = gl.getParameter(gl.RENDERER);
-    }
-  }
-
-  env.webgpu.supported = env.browser && typeof navigator['gpu'] !== 'undefined';
-  env.webgpu.backend = env.backends.includes('webgpu');
-  if (env.webgpu.supported) env.webgpu.adapter = (await navigator['gpu'].requestAdapter())?.name;
-
-  // enumerate kernels
-  env.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
-}
-
-export async function get() {
-  env.browser = typeof navigator !== 'undefined';
-  env.node = typeof process !== 'undefined';
-  env.tfjs.version = tf.version_core;
-
-  // offscreencanvas supported?
-  env.offscreen = typeof env.offscreen === 'undefined' ? typeof OffscreenCanvas !== 'undefined' : env.offscreen;
-  // get platform and agent
-  if (typeof navigator !== 'undefined') {
-    const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
-    if (raw && raw[0]) {
-      const platformMatch = raw[0].match(/\(([^()]+)\)/g);
-      env.platform = (platformMatch && platformMatch[0]) ? platformMatch[0].replace(/\(|\)/g, '') : '';
-      env.agent = navigator.userAgent.replace(raw[0], '');
-      if (env.platform[1]) env.agent = env.agent.replace(raw[1], '');
-      env.agent = env.agent.replace(/  /g, ' ');
-
-      // chrome offscreencanvas gpu memory leak
-      /*
-      const isChrome = env.agent.match(/Chrome\/.[0-9]/g);
-      const verChrome = isChrome && isChrome[0] ? isChrome[0].split('/')[1] : 0;
-      if (verChrome > 0 && verChrome > 92 && verChrome < 96) {
-        log('disabling offscreenCanvas due to browser error:', isChrome ? isChrome[0] : 'unknown');
-        env.offscreen = false;
-      }
-      */
-    }
-  } else if (typeof process !== 'undefined') {
-    env.platform = `${process.platform} ${process.arch}`;
-    env.agent = `NodeJS ${process.version}`;
-  }
-  // @ts-ignore WorkerGlobalScope evaluated in browser only
-  env.worker = env.browser && env.offscreen ? (typeof WorkerGlobalScope !== 'undefined') : undefined;
-  await backendInfo();
-
-  // get cpu info
-  // await cpuInfo();
-}
-
-export async function set(obj) {
-  env = mergeDeep(env, obj);
-}
+/** Env class that holds detected capabilities */
+export class Env {
+  /** Running in Browser */
+  browser: boolean;
+  /** Running in NodeJS */
+  node: boolean;
+  /** Running in WebWorker thread */
+  worker: boolean;
+  /** Detected platform */
+  platform: string = '';
+  /** Detected agent */
+  agent: string = '';
+  /** List of supported backends */
+  backends: string[] = [];
+  /** Has any work been performed so far */
+  initial: boolean;
+  /** Are image filters supported? */
+  filter: boolean | undefined;
+  /** TFJS instance details */
+  tfjs: {
+    version: undefined | string,
+  };
+  /** Is offscreenCanvas supported? */
+  offscreen: undefined | boolean;
+  /** WASM detected capabilities */
+  wasm: {
+    supported: undefined | boolean,
+    backend: undefined | boolean,
+    simd: undefined | boolean,
+    multithread: undefined | boolean,
+  } = {
+    supported: undefined,
+    backend: undefined,
+    simd: undefined,
+    multithread: undefined,
+  };
+  /** WebGL detected capabilities */
+  webgl: {
+    supported: undefined | boolean,
+    backend: undefined | boolean,
+    version: undefined | string,
+    renderer: undefined | string,
+  } = {
+    supported: undefined,
+    backend: undefined,
+    version: undefined,
+    renderer: undefined,
+  };
+  /** WebGPU detected capabilities */
+  webgpu: {
+    supported: undefined | boolean,
+    backend: undefined | boolean,
+    adapter: undefined | string,
+  } = {
+    supported: undefined,
+    backend: undefined,
+    adapter: undefined,
+  };
+  /** CPU info */
+  cpu: {
+    model: undefined | string,
+    flags: string[],
+  } = {
+    model: undefined,
+    flags: [],
+  };
+  /** List of supported kernels for current backend */
+  kernels: string[] = [];
+  /** MonkeyPatch for Canvas */
+  Canvas: undefined;
+  /** MonkeyPatch for Image */
+  Image: undefined;
+  /** MonkeyPatch for ImageData */
+  ImageData: undefined;
+
+  constructor() {
+    this.browser = typeof navigator !== 'undefined';
+    this.node = typeof process !== 'undefined';
+    this.tfjs = { version: tf.version_core };
+    this.offscreen = typeof OffscreenCanvas !== 'undefined';
+    this.initial = true;
+    // @ts-ignore WorkerGlobalScope evaluated in browser only
+    this.worker = this.browser && this.offscreen ? (typeof WorkerGlobalScope !== 'undefined') : undefined;
+    if (typeof navigator !== 'undefined') {
+      const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
+      if (raw && raw[0]) {
+        const platformMatch = raw[0].match(/\(([^()]+)\)/g);
+        this.platform = (platformMatch && platformMatch[0]) ? platformMatch[0].replace(/\(|\)/g, '') : '';
+        this.agent = navigator.userAgent.replace(raw[0], '');
+        if (this.platform[1]) this.agent = this.agent.replace(raw[1], '');
+        this.agent = this.agent.replace(/  /g, ' ');
+        // chrome offscreencanvas gpu memory leak
+        /*
+        const isChrome = env.agent.match(/Chrome\/.[0-9]/g);
+        const verChrome = isChrome && isChrome[0] ? isChrome[0].split('/')[1] : 0;
+        if (verChrome > 92 && verChrome < 96) {
+          log('disabling offscreenCanvas due to browser error:', isChrome ? isChrome[0] : 'unknown');
+          this.offscreen = false;
+        }
+        */
+      }
+    } else if (typeof process !== 'undefined') {
+      this.platform = `${process.platform} ${process.arch}`;
+      this.agent = `NodeJS ${process.version}`;
+    }
+  }
+
+  async updateBackend() {
+    // analyze backends
+    this.backends = Object.keys(tf.engine().registryFactory);
+    this.wasm.supported = typeof WebAssembly !== 'undefined';
+    this.wasm.backend = this.backends.includes('wasm');
+    if (this.wasm.supported && this.wasm.backend && tf.getBackend() === 'wasm') {
+      this.wasm.simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');
+      this.wasm.multithread = await tf.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT');
+    }
+    const c = image.canvas(100, 100);
+    const ctx = c ? c.getContext('webgl2') : undefined; // causes too many gl contexts
+    // const ctx = typeof tf.backend().getGPGPUContext !== undefined ? tf.backend().getGPGPUContext : null;
+    this.webgl.supported = typeof ctx !== 'undefined';
+    this.webgl.backend = this.backends.includes('webgl');
+    if (this.webgl.supported && this.webgl.backend && (tf.getBackend() === 'webgl' || tf.getBackend() === 'humangl')) {
+      // @ts-ignore getGPGPUContext only exists on WebGL backend
+      const gl = tf.backend().gpgpu !== 'undefined' ? await tf.backend().getGPGPUContext().gl : null;
+      if (gl) {
+        this.webgl.version = gl.getParameter(gl.VERSION);
+        this.webgl.renderer = gl.getParameter(gl.RENDERER);
+      }
+    }
+    this.webgpu.supported = this.browser && typeof navigator['gpu'] !== 'undefined';
+    this.webgpu.backend = this.backends.includes('webgpu');
+    if (this.webgpu.supported) this.webgpu.adapter = (await navigator['gpu'].requestAdapter())?.name;
+    // enumerate kernels
+    this.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
+  }
+
+  async updateCPU() {
+    const cpu = { model: '', flags: [] };
+    if (this.node && this.platform?.startsWith('linux')) {
+      // eslint-disable-next-line global-require
+      const fs = require('fs');
+      try {
+        const data = fs.readFileSync('/proc/cpuinfo').toString();
+        for (const line of data.split('\n')) {
+          if (line.startsWith('model name')) {
+            cpu.model = line.match(/:(.*)/g)[0].replace(':', '').trim();
+          }
+          if (line.startsWith('flags')) {
+            cpu.flags = line.match(/:(.*)/g)[0].replace(':', '').trim().split(' ').sort();
+          }
+        }
+      } catch { /**/ }
+    }
+    if (!this['cpu']) Object.defineProperty(this, 'cpu', { value: cpu });
+    else this['cpu'] = cpu;
+  }
+}
+
+export const env = new Env();
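With the class in place, all consumers share one instance, and the backend-dependent fields are filled in by awaiting updateBackend() after tfjs initialization. Usage sketch (relative import path assumed, not part of this diff):

import * as tf from '../../dist/tfjs.esm.js';
import { env, Env } from './util/env';

export async function report(): Promise<Env> {
  await tf.ready();           // a backend must exist before the backend-dependent probes
  await env.updateBackend();  // fills env.backends, env.wasm, env.webgl, env.webgpu, env.kernels
  await env.updateCPU();      // fills env.cpu when running under nodejs on linux
  return env;                 // same shared instance that Human exposes as human.env
}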