mirror of https://github.com/vladmandic/human
architectural improvements
parent 1b53b190b1
commit cc4650c151
@@ -9,7 +9,7 @@

## Changelog

-### **HEAD -> main** 2022/10/28 mandic00@live.com
+### **HEAD -> main** 2022/11/04 mandic00@live.com

- add named exports
- add draw label templates
TODO.md: 21 changed lines
@@ -51,10 +51,29 @@ No support for running in **web workers** as Safari still does not support `OffscreenCanvas`

## Pending Release Changes

Optimizations:
- Enabled high-resolution optimizations
  Internal limits increased from **2k** to **4k**
- Enhanced device capabilities detection
  See `human.env.[agent, wasm, webgl, webgpu]` for details
- If `config.backend` is not set, `Human` will auto-select the best backend
  based on device capabilities (see the sketch after this section)
- Enhanced support for `webgpu`

Features:
- Add [draw label templates](https://github.com/vladmandic/human/wiki/Draw)
- Add `config.filter.autoBrightness` (*enabled by default*)
  Per-frame on-the-fly video brightness adjustment,
  which significantly increases performance and precision in poorly lit scenes
- Improved `config.filter.equalization` (*disabled by default*)
  On-demand histogram equalization for images and video

Architecture:
- Upgrade to TFJS 4.0 with **strong typing**
  See [notes](https://github.com/vladmandic/human#typedefs) on how to use
- `TypeDef` refactoring
- Reduce build dependencies
  `Human` is now 30% smaller :)
  As usual, `Human` has **zero** runtime dependencies;
  all *devDependencies* are only used to rebuild `Human` itself
- Add named exports for improved bundler support when using non-default imports
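A minimal sketch of the options described above, assuming the published `Human` named export and default model paths (illustrative only, not part of the release notes):

```ts
import { Human } from '@vladmandic/human'; // named export added in this release

const human = new Human({
  // backend intentionally left unset: human auto-selects the best available
  // backend for the device (webgpu > webgl > wasm > cpu in browsers)
  filter: {
    enabled: true,
    autoBrightness: true, // per-frame brightness adjustment, enabled by default
    equalization: false,  // on-demand histogram equalization, disabled by default
  },
});

async function run(video: HTMLVideoElement) {
  await human.load(); // pre-load configured models
  console.log('auto-selected backend:', human.tf.getBackend());
  console.log('detected capabilities:', human.env.webgpu, human.env.webgl, human.env.wasm);
  return human.detect(video); // run all enabled detectors on the input
}
```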
@@ -4,6 +4,6 @@

  author: <https://github.com/vladmandic>'
*/

import*as m from"../../dist/human.esm.js";var w={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},segmentation:{enabled:!1},gesture:{enabled:!0}},e=new m.Human(w);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var a={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},s={detectFPS:0,drawFPS:0,frames:0,averageMs:0},o=(...t)=>{a.log.innerText+=t.join(" ")+`
`,console.log(...t)},d=t=>a.fps.innerText=t,v=t=>a.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(t).replace(/"|{|}/g,"").replace(/,/g," | ");async function f(){if(!a.video.paused){n.start===0&&(n.start=e.now()),await e.detect(a.video);let t=e.tf.memory().numTensors;t-n.tensors!==0&&o("allocated tensors:",t-n.tensors),n.tensors=t,s.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,s.frames++,s.averageMs=Math.round(1e3*(e.now()-n.start)/s.frames)/1e3,s.frames%100===0&&!a.video.paused&&o("performance",{...s,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(f)}async function u(){var i,l,c;if(!a.video.paused){let r=e.next(e.result);e.config.filter.flip?e.draw.canvas(r.canvas,a.canvas):e.draw.canvas(a.video,a.canvas);let p={bodyLabels:`person confidence [score] and ${(c=(l=(i=e.result)==null?void 0:i.body)==null?void 0:l[0])==null?void 0:c.keypoints.length} keypoints`};await e.draw.all(a.canvas,r,p),v(r.performance)}let t=e.now();s.drawFPS=Math.round(1e3*1e3/(t-n.draw))/1e3,n.draw=t,d(a.video.paused?"paused":`fps: ${s.detectFPS.toFixed(1).padStart(5," ")} detect | ${s.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(u,30)}async function g(){await e.webcam.start({element:a.video,crop:!0}),a.canvas.width=e.webcam.width,a.canvas.height=e.webcam.height,a.canvas.onclick=async()=>{e.webcam.paused?await e.webcam.play():e.webcam.pause()}}async function b(){o("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),o("platform:",e.env.platform,"| agent:",e.env.agent),d("loading..."),await e.load(),o("backend:",e.tf.getBackend(),"| available:",e.env.backends),o("models stats:",e.getModelStats()),o("models loaded:",Object.values(e.models).filter(t=>t!==null).length),o("environment",e.env),d("initializing..."),await e.warmup(),await g(),await f(),await u()}window.onload=b;
import*as m from"../../dist/human.esm.js";var f=1920,g={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1,width:f},face:{enabled:!0,detector:{rotation:!0},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},segmentation:{enabled:!1},gesture:{enabled:!0}},e=new m.Human(g);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var a={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},o={detectFPS:0,drawFPS:0,frames:0,averageMs:0},s=(...t)=>{a.log.innerText+=t.join(" ")+`
`,console.log(...t)},r=t=>a.fps.innerText=t,b=t=>a.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(t).replace(/"|{|}/g,"").replace(/,/g," | ");async function u(){if(!a.video.paused){n.start===0&&(n.start=e.now()),await e.detect(a.video);let t=e.tf.memory().numTensors;t-n.tensors!==0&&s("allocated tensors:",t-n.tensors),n.tensors=t,o.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,o.frames++,o.averageMs=Math.round(1e3*(e.now()-n.start)/o.frames)/1e3,o.frames%100===0&&!a.video.paused&&s("performance",{...o,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(u)}async function p(){var d,i,l;if(!a.video.paused){let c=e.next(e.result),w=await e.image(a.video);e.draw.canvas(w.canvas,a.canvas);let v={bodyLabels:`person confidence [score] and ${(l=(i=(d=e.result)==null?void 0:d.body)==null?void 0:i[0])==null?void 0:l.keypoints.length} keypoints`};await e.draw.all(a.canvas,c,v),b(c.performance)}let t=e.now();o.drawFPS=Math.round(1e3*1e3/(t-n.draw))/1e3,n.draw=t,r(a.video.paused?"paused":`fps: ${o.detectFPS.toFixed(1).padStart(5," ")} detect | ${o.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(p,30)}async function h(){await e.webcam.start({element:a.video,crop:!0,width:f}),a.canvas.width=e.webcam.width,a.canvas.height=e.webcam.height,a.canvas.onclick=async()=>{e.webcam.paused?await e.webcam.play():e.webcam.pause()}}async function y(){s("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),s("platform:",e.env.platform,"| agent:",e.env.agent),r("loading..."),await e.load(),s("backend:",e.tf.getBackend(),"| available:",e.env.backends),s("models stats:",e.getModelStats()),s("models loaded:",Object.values(e.models).filter(t=>t!==null).length),s("environment",e.env),r("initializing..."),await e.warmup(),await h(),await u(),await p()}window.onload=y;
//# sourceMappingURL=index.js.map
File diff suppressed because one or more lines are too long
@@ -9,11 +9,13 @@
import * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human

+const width = 1920; // used by webcam config as well as human maximum resolution // can be anything, but resolutions higher than 4k will disable internal optimizations
+
const humanConfig: Partial<H.Config> = { // user configuration for human, used to fine-tune behavior
  // backend: 'wasm',
  // backend: 'webgpu',
  modelBasePath: '../../models',
-  filter: { enabled: true, equalization: false, flip: false },
-  face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true }, antispoof: { enabled: true }, liveness: { enabled: true } },
+  filter: { enabled: true, equalization: false, flip: false, width },
+  face: { enabled: true, detector: { rotation: true }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true }, antispoof: { enabled: true }, liveness: { enabled: true } },
  body: { enabled: true },
  hand: { enabled: true },
  object: { enabled: false },
@@ -65,8 +67,8 @@ async function detectionLoop() { // main detection loop

async function drawLoop() { // main screen refresh loop
  if (!dom.video.paused) {
    const interpolated = human.next(human.result); // smoothen result using last-known results
-    if (human.config.filter.flip) human.draw.canvas(interpolated.canvas as HTMLCanvasElement, dom.canvas); // draw processed image to screen canvas
-    else human.draw.canvas(dom.video, dom.canvas); // draw original video to screen canvas // better than using processed image as this loop happens faster than processing loop
+    const processed = await human.image(dom.video); // get current video frame, but enhanced with human.filters
+    human.draw.canvas(processed.canvas as HTMLCanvasElement, dom.canvas);

    const opt: Partial<H.DrawOptions> = { bodyLabels: `person confidence [score] and ${human.result?.body?.[0]?.keypoints.length} keypoints` };
    await human.draw.all(dom.canvas, interpolated, opt); // draw labels, boxes, lines, etc.
@@ -80,7 +82,7 @@ async function drawLoop() { // main screen refresh loop
}

async function webCam() {
-  await human.webcam.start({ element: dom.video, crop: true }); // use human webcam helper methods and associate webcam stream with a dom element
+  await human.webcam.start({ element: dom.video, crop: true, width }); // use human webcam helper methods and associate webcam stream with a dom element
  dom.canvas.width = human.webcam.width;
  dom.canvas.height = human.webcam.height;
  dom.canvas.onclick = async () => { // pause when clicked on screen and resume on next click
@@ -74,7 +74,7 @@
  "devDependencies": {
    "@html-eslint/eslint-plugin": "^0.15.0",
    "@html-eslint/parser": "^0.15.0",
-    "@microsoft/api-extractor": "^7.33.5",
+    "@microsoft/api-extractor": "^7.33.6",
    "@tensorflow/tfjs-backend-cpu": "^4.0.0",
    "@tensorflow/tfjs-backend-wasm": "^4.0.0",
    "@tensorflow/tfjs-backend-webgl": "^4.0.0",
@@ -85,14 +85,14 @@
    "@tensorflow/tfjs-node-gpu": "^4.0.0",
    "@types/node": "^18.11.9",
    "@types/offscreencanvas": "^2019.7.0",
-    "@typescript-eslint/eslint-plugin": "^5.42.0",
-    "@typescript-eslint/parser": "^5.42.0",
+    "@typescript-eslint/eslint-plugin": "^5.42.1",
+    "@typescript-eslint/parser": "^5.42.1",
    "@vladmandic/build": "^0.7.14",
    "@vladmandic/pilogger": "^0.4.6",
    "@vladmandic/tfjs": "github:vladmandic/tfjs",
    "canvas": "^2.10.2",
    "esbuild": "^0.15.13",
-    "eslint": "8.26.0",
+    "eslint": "8.27.0",
    "eslint-config-airbnb-base": "^15.0.0",
    "eslint-plugin-html": "^7.1.0",
    "eslint-plugin-import": "^2.26.0",
@@ -186,6 +186,8 @@ export interface FilterConfig {
  return: boolean,
  /** flip input as mirror image */
  flip: boolean,
+  /** apply auto-brightness */
+  autoBrightness: boolean,
  /** range: -1 (darken) to 1 (lighten) */
  brightness: number,
  /** range: -1 (reduce contrast) to 1 (increase contrast) */
@@ -350,6 +352,7 @@ const config: Config = {
  height: 0,
  flip: false,
  return: true,
+  autoBrightness: true,
  brightness: 0,
  contrast: 0,
  sharpness: 0,
@@ -139,7 +139,6 @@ export class Human {
  const tfVersion = (tf.version.tfjs || tf.version_core).replace(/-(.*)/, '');
  defaults.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tfVersion}/dist/`;
  defaults.modelBasePath = env.browser ? '../models/' : 'file://models/';
-  defaults.backend = env.browser ? 'webgl' : 'tensorflow';
-  this.version = app.version; // expose version property on instance of class
+  Object.defineProperty(this, 'version', { value: app.version }); // expose version property directly on class itself
  this.config = JSON.parse(JSON.stringify(defaults));
@@ -252,7 +251,7 @@ export class Human {
   * @param getTensor - should image processing also return tensor or just canvas
   * Returns object with `tensor` and `canvas`
   */
-  image(input: Input, getTensor: boolean = true) {
+  image(input: Input, getTensor: boolean = false) {
    return image.process(input, this.config, getTensor);
  }
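Since the default for `getTensor` flips from `true` to `false` here, callers that relied on the returned tensor must now opt in explicitly; a usage sketch (hypothetical `videoElement`, not part of the commit):

```ts
const { canvas } = await human.image(videoElement);       // new default: processed canvas only
const { tensor } = await human.image(videoElement, true); // explicitly request the tensor as well
if (tensor) human.tf.dispose(tensor);                     // caller owns and must dispose the tensor
```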
@@ -455,6 +454,7 @@ export class Human {

  timeStamp = now();
  this.config.skipAllowed = await image.skip(this.config, img.tensor);
+  this.config.filter.autoBrightness = (this.config.filter.autoBrightness || false) && this.config.skipAllowed; // disable autoBrightness on scene change
  if (!this.performance.totalFrames) this.performance.totalFrames = 0;
  if (!this.performance.cachedFrames) this.performance.cachedFrames = 0;
  (this.performance.totalFrames)++;
@@ -6,19 +6,23 @@ import * as tf from 'dist/tfjs.esm.js';
import type { Tensor } from '../exports';

export async function histogramEqualization(inputImage: Tensor): Promise<Tensor> {
-  // const maxValue = 254; // using 255 results in values slightly larger than 1 due to math rounding errors
  const squeeze = inputImage.shape.length === 4 ? tf.squeeze(inputImage) : inputImage;
-  const channels = tf.split(squeeze, 3, 2);
-  const min: Tensor[] = [tf.min(channels[0]), tf.min(channels[1]), tf.min(channels[2])];
-  const max: Tensor[] = [tf.max(channels[0]), tf.max(channels[1]), tf.max(channels[2])];
-  const absMax = await Promise.all(max.map((channel) => channel.data()));
-  const maxValue = 0.99 * Math.max(absMax[0][0], absMax[1][0], absMax[2][0]);
-  const sub = [tf.sub(channels[0], min[0]), tf.sub(channels[1], min[1]), tf.sub(channels[2], min[2])];
-  const range = [tf.sub(max[0], min[0]), tf.sub(max[1], min[1]), tf.sub(max[2], min[2])];
-  const fact = [tf.div(maxValue, range[0]), tf.div(maxValue, range[1]), tf.div(maxValue, range[2])];
-  const enh = [tf.mul(sub[0], fact[0]), tf.mul(sub[1], fact[1]), tf.mul(sub[2], fact[2])];
-  const rgb = tf.stack([enh[0], enh[1], enh[2]], 2);
-  const reshape = tf.reshape(rgb, [1, squeeze.shape[0] || 0, squeeze.shape[1] || 0, 3]);
-  tf.dispose([...channels, ...min, ...max, ...sub, ...range, ...fact, ...enh, rgb, squeeze]);
-  return reshape; // output shape is [1, height, width, 3]
+  const rgb = tf.split(squeeze, 3, 2);
+  const min: Tensor[] = [tf.min(rgb[0]), tf.min(rgb[1]), tf.min(rgb[2])]; // minimum pixel value per channel T[]
+  const max: Tensor[] = [tf.max(rgb[0]), tf.max(rgb[1]), tf.max(rgb[2])]; // maximum pixel value per channel T[]
+  // const absMin = await Promise.all(min.map((channel) => channel.data())); // minimum pixel value per channel A[]
+  // const minValue = Math.min(absMin[0][0], absMin[1][0], absMin[2][0]);
+  const absMax = await Promise.all(max.map((channel) => channel.data())); // maximum pixel value per channel A[]
+  const maxValue = Math.max(absMax[0][0], absMax[1][0], absMax[2][0]);
+  const maxRange = maxValue > 1 ? 255 : 1;
+  const factor = maxRange / maxValue;
+  const sub = [tf.sub(rgb[0], min[0]), tf.sub(rgb[1], min[1]), tf.sub(rgb[2], min[2])]; // channels offset by min values
+  const range = [tf.sub(max[0], min[0]), tf.sub(max[1], min[1]), tf.sub(max[2], min[2])]; // channel ranges
+  // const fact = [tf.div(maxRange, absMax[0]), tf.div(maxRange, absMax[1]), tf.div(maxRange, absMax[2])]; // factors between
+  const enh = [tf.mul(sub[0], factor), tf.mul(sub[1], factor), tf.mul(sub[2], factor)];
+  const stack = tf.stack([enh[0], enh[1], enh[2]], 2);
+  const reshape = tf.reshape(stack, [1, squeeze.shape[0] || 0, squeeze.shape[1] || 0, 3]);
+  const final = tf.squeeze(reshape);
+  tf.dispose([...rgb, ...min, ...max, ...sub, ...range, ...enh, stack, squeeze, reshape]);
+  return final; // output shape is [height, width, 3]
}
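The rewritten function stretches each RGB channel over the input's full dynamic range (0..1 or 0..255, auto-detected via `maxRange`) instead of the previous fixed `0.99 * max` scaling, and returns a squeezed `[height, width, 3]` tensor. A self-contained sketch of the same per-channel stretch idea, assuming `@tensorflow/tfjs` and a normalized 0..1 input (an illustration, not the library's code path):

```ts
import * as tf from '@tensorflow/tfjs';

// stretch each channel of a [height, width, 3] image so its values span 0..1
function stretchChannels(image: tf.Tensor3D): tf.Tensor3D {
  return tf.tidy(() => { // tidy disposes all intermediate tensors
    const rgb = tf.split(image, 3, 2); // three [height, width, 1] channel tensors
    const stretched = rgb.map((c) => c.sub(c.min()).div(c.max().sub(c.min()).add(1e-6))); // (c - min) / range
    return tf.concat(stretched, 2) as tf.Tensor3D;
  });
}

// a 1x2 pixel image whose channel values span 0.2..0.6 gets stretched to 0..1
stretchChannels(tf.tensor3d([[[0.2, 0.4, 0.6], [0.6, 0.2, 0.4]]])).print();
```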
@@ -188,7 +188,7 @@ export async function process(input: Input, config: Config, getTensor: boolean =
    if (config.filter.technicolor) fx.add('technicolor');
    if (config.filter.polaroid) fx.add('polaroid');
    if (config.filter.pixelate !== 0) fx.add('pixelate', config.filter.pixelate);
-    if (fx.get() > 0) outCanvas = fx.apply(inCanvas);
+    if (fx.get()?.length > 1) outCanvas = fx.apply(inCanvas);
    else outCanvas = fx.draw(inCanvas);
  }
} else {
@@ -238,6 +238,14 @@ export async function process(input: Input, config: Config, getTensor: boolean =
  if (!pixels) throw new Error('input error: cannot create tensor');
  const casted: Tensor = tf.cast(pixels, 'float32');
  const tensor: Tensor = config.filter.equalization ? await enhance.histogramEqualization(casted) : tf.expandDims(casted, 0);
+
+  if (config.filter.autoBrightness) {
+    const max = tf.max(tensor);
+    const maxVal = await max.data();
+    config.filter.brightness = maxVal[0] > 1 ? (1 - maxVal[0] / 255) : (1 - maxVal[0]);
+    tf.dispose(max);
+  }
+
  tf.dispose([pixels, casted]);
  return { tensor: tensor as Tensor4D, canvas: (config.filter.return ? outCanvas : null) };
}
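The `autoBrightness` block above derives a `brightness` offset that lifts the brightest observed pixel toward full scale, covering both 0..255 and 0..1 inputs; the arithmetic as a standalone sketch:

```ts
// offset needed to push the brightest pixel to full range:
// 0..255 input, max pixel 153: 1 - 153 / 255 = 0.4 -> lighten by 0.4
// 0..1 input, max pixel 0.9:   1 - 0.9       = 0.1 -> lighten by 0.1
const brightnessFor = (maxVal: number): number => (maxVal > 1 ? 1 - maxVal / 255 : 1 - maxVal);
console.log(brightnessFor(153)); // 0.4
console.log(brightnessFor(0.9)); // ~0.1 (floating point)
```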
@@ -274,7 +282,7 @@ const checksum = async (input: Tensor): Promise<number> => { // use tf sum or js

export async function skip(config: Partial<Config>, input: Tensor) {
  let skipFrame = false;
-  if (config.cacheSensitivity === 0 || !input.shape || input.shape.length !== 4 || input.shape[1] > 2048 || input.shape[2] > 2048) return skipFrame; // cache disabled or input is invalid or too large for cache analysis
+  if (config.cacheSensitivity === 0 || !input.shape || input.shape.length !== 4 || input.shape[1] > 3840 || input.shape[2] > 2160) return skipFrame; // cache disabled or input is invalid or too large for cache analysis

  /*
  const checkSum = await checksum(input);
@@ -1,7 +1,7 @@
/**
 * CenterNet object detection model implementation
 *
- * Based on: [**NanoDet**](https://github.com/RangiLyu/nanodet)
+ * Based on: [**MB3-CenterNet**](https://github.com/610265158/mobilenetv3_centernet)
 */

import * as tf from 'dist/tfjs.esm.js';
@@ -1,7 +1,7 @@
/**
 * NanoDet object detection model implementation
 *
- * Based on: [**MB3-CenterNet**](https://github.com/610265158/mobilenetv3_centernet)
+ * Based on: [**NanoDet**](https://github.com/RangiLyu/nanodet)
 */

import * as tf from 'dist/tfjs.esm.js';
@@ -8,6 +8,15 @@ import * as humangl from './humangl';
import * as constants from './constants';
import type { TensorInfo } from './types';

+export async function getBestBackend(): Promise<BackendEnum> {
+  await env.updateBackend(); // update env on backend init
+  if (!env.browser) return 'tensorflow';
+  if (env.webgpu.supported && env.webgpu.backend) return 'webgpu';
+  if (env.webgl.supported && env.webgl.backend) return 'webgl';
+  if (env.wasm.supported && env.wasm.backend) return 'wasm';
+  return 'cpu';
+}
+
function registerCustomOps(config: Config) {
  const newKernels: string[] = [];
  if (!env.kernels.includes('mod')) {
@@ -73,6 +82,7 @@ let defaultFlags: Record<string, unknown> = {};

export async function check(instance: Human, force = false) {
  instance.state = 'backend';
+  if (instance.config.backend?.length === 0) instance.config.backend = await getBestBackend();
  if (force || env.initial || (instance.config.backend && (instance.config.backend.length > 0) && (tf.getBackend() !== instance.config.backend))) {
    const timeStamp = now();
@@ -53,17 +53,21 @@ export class Env {
    backend: undefined | boolean,
    version: undefined | string,
    renderer: undefined | string,
+    shader: undefined | string,
+    vendor: undefined | string,
  } = {
    supported: undefined,
    backend: undefined,
    version: undefined,
    renderer: undefined,
+    shader: undefined,
+    vendor: undefined,
  };
  /** WebGPU detected capabilities */
  webgpu: {
    supported: undefined | boolean,
    backend: undefined | boolean,
-    adapter: undefined | string,
+    adapter: undefined | GPUAdapterInfo,
  } = {
    supported: undefined,
    backend: undefined,
@@ -123,35 +127,36 @@
  async updateBackend() {
    // analyze backends
    this.backends = Object.keys(tf.engine().registryFactory);
-    this.tensorflow = {
-      version: (tf.backend()['binding'] ? tf.backend()['binding'].TF_Version : undefined),
-      gpu: (tf.backend()['binding'] ? tf.backend()['binding'].isUsingGpuDevice() : undefined),
-    };
+    try {
+      this.tensorflow = {
+        version: (tf.backend()['binding'] ? tf.backend()['binding'].TF_Version : undefined),
+        gpu: (tf.backend()['binding'] ? tf.backend()['binding'].isUsingGpuDevice() : undefined),
+      };
+    } catch { /**/ }
    this.wasm.supported = typeof WebAssembly !== 'undefined';
    this.wasm.backend = this.backends.includes('wasm');
-    if (this.wasm.supported && this.wasm.backend && tf.getBackend() === 'wasm') {
-      this.wasm.simd = tf.env().get('WASM_HAS_SIMD_SUPPORT') as boolean;
-      this.wasm.multithread = tf.env().get('WASM_HAS_MULTITHREAD_SUPPORT') as boolean;
+    if (this.wasm.supported && this.wasm.backend) {
+      this.wasm.simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT') as boolean;
+      this.wasm.multithread = await tf.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT') as boolean;
    }
    const c = image.canvas(100, 100);
    const ctx = c ? c.getContext('webgl2') : undefined; // causes too many gl contexts
    // const ctx = typeof tf.backend().getGPGPUContext !== undefined ? tf.backend().getGPGPUContext : null;
    this.webgl.supported = typeof ctx !== 'undefined';
    this.webgl.backend = this.backends.includes('webgl');
-    if (this.webgl.supported && this.webgl.backend && (tf.getBackend() === 'webgl' || tf.getBackend() === 'humangl')) {
-      const backend = tf.backend();
-      const gl = typeof backend['gpgpu'] !== 'undefined' ? backend['getGPGPUContext']().gl : null;
-      if (gl) {
-        this.webgl.version = gl.getParameter(gl.VERSION);
-        this.webgl.renderer = gl.getParameter(gl.RENDERER);
-      }
+    if (this.webgl.supported && this.webgl.backend) {
+      const gl = ctx as WebGL2RenderingContext;
+      this.webgl.version = gl.getParameter(gl.VERSION);
+      this.webgl.vendor = gl.getParameter(gl.VENDOR);
+      this.webgl.renderer = gl.getParameter(gl.RENDERER);
+      this.webgl.shader = gl.getParameter(gl.SHADING_LANGUAGE_VERSION);
    }
    this.webgpu.supported = this.browser && typeof navigator.gpu !== 'undefined';
    this.webgpu.backend = this.backends.includes('webgpu');
    try {
      if (this.webgpu.supported) {
        const adapter = await navigator.gpu.requestAdapter();
-        this.webgpu.adapter = adapter ? adapter.name : undefined;
+        this.webgpu.adapter = await adapter?.requestAdapterInfo();
      }
    } catch {
      this.webgpu.supported = false;
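With the change above, `env.webgpu.adapter` holds a `GPUAdapterInfo` object instead of the adapter name string; a consumption sketch (field names per the WebGPU spec, availability varies by browser):

```ts
await human.env.updateBackend(); // refresh detected capabilities
const info = human.env.webgpu.adapter; // GPUAdapterInfo | undefined after this change
if (info) console.log('webgpu adapter:', info.vendor, info.architecture, info.description);
```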
@@ -1,40 +1,40 @@
-2022-11-04 13:19:02 DATA: Build {"name":"@vladmandic/human","version":"3.0.0"}
-2022-11-04 13:19:02 INFO: Application: {"name":"@vladmandic/human","version":"3.0.0"}
-2022-11-04 13:19:02 INFO: Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
-2022-11-04 13:19:02 INFO: Toolchain: {"build":"0.7.14","esbuild":"0.15.13","typescript":"4.8.4","typedoc":"0.23.20","eslint":"8.26.0"}
-2022-11-04 13:19:02 INFO: Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
-2022-11-04 13:19:02 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
-2022-11-04 13:19:02 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":361}
-2022-11-04 13:19:02 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":569,"outputBytes":924}
-2022-11-04 13:19:02 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":79,"inputBytes":672790,"outputBytes":316303}
-2022-11-04 13:19:02 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":577,"outputBytes":928}
-2022-11-04 13:19:02 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":79,"inputBytes":672794,"outputBytes":316307}
-2022-11-04 13:19:02 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":665,"outputBytes":1876}
-2022-11-04 13:19:02 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":79,"inputBytes":673742,"outputBytes":316418}
-2022-11-04 13:19:02 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1375,"outputBytes":670}
-2022-11-04 13:19:02 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":79,"inputBytes":672536,"outputBytes":314908}
-2022-11-04 13:19:02 STATE: Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":1375,"outputBytes":1144900}
-2022-11-04 13:19:02 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":79,"inputBytes":1816766,"outputBytes":1456466}
-2022-11-04 13:19:02 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":79,"inputBytes":1816766,"outputBytes":1913830}
-2022-11-04 13:19:06 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":15}
-2022-11-04 13:19:08 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":76,"generated":true}
-2022-11-04 13:19:08 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5936,"outputBytes":2867}
-2022-11-04 13:19:08 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17134,"outputBytes":9181}
-2022-11-04 13:19:16 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":116,"errors":0,"warnings":0}
-2022-11-04 13:19:16 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
-2022-11-04 13:19:16 STATE: Copy: {"input":"src/tfjs","output":"dist/tfjs.esm.d.ts"}
-2022-11-04 13:19:16 INFO: Done...
-2022-11-04 13:19:17 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":195}
-2022-11-04 13:19:17 STATE: Filter: {"input":"types/human.d.ts"}
-2022-11-04 13:19:17 STATE: Link: {"input":"types/human.d.ts"}
-2022-11-04 13:19:17 INFO: Analyze models: {"folders":8,"result":"models/models.json"}
-2022-11-04 13:19:17 STATE: Models {"folder":"./models","models":12}
-2022-11-04 13:19:17 STATE: Models {"folder":"../human-models/models","models":43}
-2022-11-04 13:19:17 STATE: Models {"folder":"../blazepose/model/","models":4}
-2022-11-04 13:19:17 STATE: Models {"folder":"../anti-spoofing/model","models":1}
-2022-11-04 13:19:17 STATE: Models {"folder":"../efficientpose/models","models":3}
-2022-11-04 13:19:17 STATE: Models {"folder":"../insightface/models","models":5}
-2022-11-04 13:19:17 STATE: Models {"folder":"../movenet/models","models":3}
-2022-11-04 13:19:17 STATE: Models {"folder":"../nanodet/models","models":4}
-2022-11-04 13:19:18 STATE: Models: {"count":58,"totalSize":386543911}
-2022-11-04 13:19:18 INFO: Human Build complete... {"logFile":"test/build.log"}
+2022-11-10 20:16:03 DATA: Build {"name":"@vladmandic/human","version":"3.0.0"}
+2022-11-10 20:16:03 INFO: Application: {"name":"@vladmandic/human","version":"3.0.0"}
+2022-11-10 20:16:03 INFO: Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
+2022-11-10 20:16:03 INFO: Toolchain: {"build":"0.7.14","esbuild":"0.15.13","typescript":"4.8.4","typedoc":"0.23.20","eslint":"8.27.0"}
+2022-11-10 20:16:03 INFO: Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
+2022-11-10 20:16:03 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
+2022-11-10 20:16:03 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":361}
+2022-11-10 20:16:03 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":569,"outputBytes":924}
+2022-11-10 20:16:03 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":79,"inputBytes":674094,"outputBytes":316829}
+2022-11-10 20:16:03 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":577,"outputBytes":928}
+2022-11-10 20:16:03 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":79,"inputBytes":674098,"outputBytes":316833}
+2022-11-10 20:16:03 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":665,"outputBytes":1876}
+2022-11-10 20:16:03 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":79,"inputBytes":675046,"outputBytes":316944}
+2022-11-10 20:16:03 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1375,"outputBytes":670}
+2022-11-10 20:16:03 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":79,"inputBytes":673840,"outputBytes":315438}
+2022-11-10 20:16:03 STATE: Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":1375,"outputBytes":1144900}
+2022-11-10 20:16:03 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":79,"inputBytes":1818070,"outputBytes":1457034}
+2022-11-10 20:16:04 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":79,"inputBytes":1818070,"outputBytes":1914674}
+2022-11-10 20:16:08 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":15}
+2022-11-10 20:16:10 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":76,"generated":true}
+2022-11-10 20:16:10 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5981,"outputBytes":2862}
+2022-11-10 20:16:10 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17134,"outputBytes":9181}
+2022-11-10 20:16:19 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":116,"errors":0,"warnings":0}
+2022-11-10 20:16:19 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
+2022-11-10 20:16:19 STATE: Copy: {"input":"src/tfjs","output":"dist/tfjs.esm.d.ts"}
+2022-11-10 20:16:19 INFO: Done...
+2022-11-10 20:16:20 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":195}
+2022-11-10 20:16:20 STATE: Filter: {"input":"types/human.d.ts"}
+2022-11-10 20:16:20 STATE: Link: {"input":"types/human.d.ts"}
+2022-11-10 20:16:20 INFO: Analyze models: {"folders":8,"result":"models/models.json"}
+2022-11-10 20:16:20 STATE: Models {"folder":"./models","models":12}
+2022-11-10 20:16:20 STATE: Models {"folder":"../human-models/models","models":43}
+2022-11-10 20:16:20 STATE: Models {"folder":"../blazepose/model/","models":4}
+2022-11-10 20:16:20 STATE: Models {"folder":"../anti-spoofing/model","models":1}
+2022-11-10 20:16:20 STATE: Models {"folder":"../efficientpose/models","models":3}
+2022-11-10 20:16:20 STATE: Models {"folder":"../insightface/models","models":5}
+2022-11-10 20:16:20 STATE: Models {"folder":"../movenet/models","models":3}
+2022-11-10 20:16:20 STATE: Models {"folder":"../nanodet/models","models":4}
+2022-11-10 20:16:21 STATE: Models: {"count":58,"totalSize":386543911}
+2022-11-10 20:16:21 INFO: Human Build complete... {"logFile":"test/build.log"}