added wasm and webgpu backends

pull/293/head
Vladimir Mandic 2020-10-30 10:23:49 -04:00
parent 6dbd481961
commit ce876f8c48
5 changed files with 72 additions and 20 deletions

View File

@ -9,7 +9,8 @@
- [**Change Log**](./CHANGELOG.md)
- [**Live Demo**](https://vladmandic.github.io/human/demo/index.html)
Compatible with Browser, WebWorker and NodeJS execution!
Compatible with *Browser*, *WebWorker* and *NodeJS* execution
Compatible with *CPU*, *WebGL*, *WASM* and *WebGPU* backends
(and possibly with React-Native, as it doesn't use any DOM objects)
*This is a pre-release project, see [issues](https://github.com/vladmandic/human/issues) for a list of known limitations and planned enhancements*
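
To make the backend options above concrete, here is a minimal sketch of the config overrides as they appear in the demo's `browser.js` in this commit; how this config object is then passed into the library is not shown in this diff, so any surrounding usage is an assumption:

```js
// backend selection sketch, mirroring the demo's config overrides in browser.js
const config = {
  backend: 'webgl',            // one of: 'cpu', 'webgl', 'wasm', 'webgpu'
  wasm: { path: '../assets' }, // where the wasm backend loads its binaries from
};
```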
@ -159,10 +160,12 @@ If your application resides in a different folder, modify `modelPath` property i
Demos are included in `/demo`:
Browser:
**Browser**:
- `index.html`, `browser.js`, `worker.js`: Full demo using Browser with ESM module, includes selectable backends and webworkers
NodeJS:
*If you want to test `wasm` or `webgpu` backends, enable loading in `index.html`*
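
For reference, the backend scripts are loaded via the script tags shown in the `index.html` diff further down in this commit (commented out by default):

```html
<!-- uncomment these in index.html to enable the wasm or webgpu backends -->
<!-- <script src="../assets/tf.es2017.js"></script> -->
<!-- <script src="../assets/tf-backend-wasm.es2017.js"></script> -->
<!-- <script src="../assets/tf-backend-webgpu.js"></script> -->
```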
**NodeJS**:
- `node.js`: Demo using NodeJS with CommonJS module
This is a very simple demo as, although the `Human` library is compatible with NodeJS execution
and is able to load images and models from local filesystem,

View File

@ -29,7 +29,8 @@ const ui = {
// configuration overrides
const config = {
backend: 'webgl', // if you want to use 'wasm' backend, enable script load of tf and tf-backend-wasm in index.html
backend: 'webgl',
wasm: { path: '../assets' },
filter: {
enabled: true,
width: 0,
@ -108,7 +109,7 @@ function drawResults(input, result, canvas) {
// update log
const engine = human.tf.engine();
const memory = `${engine.state.numBytes.toLocaleString()} bytes ${engine.state.numDataBuffers.toLocaleString()} buffers ${engine.state.numTensors.toLocaleString()} tensors`;
const gpu = engine.backendInstance ? `GPU: ${engine.backendInstance.numBytesInGPU.toLocaleString()} bytes` : '';
const gpu = engine.backendInstance ? `GPU: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : '';
document.getElementById('log').innerText = `
TFJS Version: ${human.tf.version_core} | Backend: ${human.tf.getBackend()} | Memory: ${memory} ${gpu}
Performance: ${str(result.performance)} | Object size: ${(str(result)).length.toLocaleString()} bytes
@ -267,6 +268,7 @@ function setupMenu() {
menu.addButton('Process Images', 'Process Images', () => detectSampleImages());
menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
menu.addList('Backend', ['cpu', 'webgl', 'wasm', 'webgpu'], config.backend, (val) => config.backend = val);
menu.addBool('Use Web Worker', ui, 'useWorker');
menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
menu.addLabel('Enabled Models');

View File

@ -12,8 +12,9 @@
<meta name="msapplication-tooltip" content="Human: AI-powered 3D Human Detection">
<link rel="manifest" href="../dist/human.esm.json">
<link rel="shortcut icon" href="../favicon.ico" type="image/x-icon">
<!-- <script src="../assets/tf.min.js"></script> -->
<!-- <script src="../assets/tf-backend-wasm.min.js"></script> -->
<!-- <script src="../assets/tf.es2017.js"></script> -->
<!-- <script src="../assets/tf-backend-wasm.es2017.js"></script> -->
<!-- <script src="../assets/tf-backend-webgpu.js"></script> -->
<script src="./browser.js" type="module"></script>
</head>
<body style="margin: 0; background: black; color: white; font-family: 'Segoe UI'; font-size: 16px; font-variant: small-caps; overflow-x: hidden">

View File

@ -8,9 +8,13 @@ const css = `
.menu-container-fadein { max-height: 100vh; overflow: hidden; transition: max-height, 0.5s ease; }
.menu-item { display: flex; white-space: nowrap; background: darkslategray; padding: 0.2rem; width: max-content; }
.menu-title { text-align: right; cursor: pointer; }
.menu-hr { margin: 0.2rem; border: 1px solid rgba(0, 0, 0, 0.5) }
.menu-hr { margin: 0.2rem; border: 1px solid rgba(0, 0, 0, 0.5); }
.menu-label { padding: 0; }
.menu-list { margin-right: 0.8rem; }
select:focus { outline: none; }
.menu-list-item { background: black; color: white; border: none; padding: 0.2rem; font-family: inherit; font-variant: inherit; border-radius: 1rem; }
.menu-chart-title { align-items: center; }
.menu-chart-canvas { background: transparent; height: 40px; width: 180px; margin: 0.2rem 0.2rem 0.2rem 1rem; }
@ -130,6 +134,24 @@ class Menu {
});
}
async addList(title, items, selected, callback) {
const el = document.createElement('div');
el.className = 'menu-item';
let options = '';
for (const item of items) {
const def = item === selected ? 'selected' : '';
options += `<option value="${item}" ${def}>${item}</option>`;
}
el.innerHTML = `<div class="menu-list"><select name="${this.ID}" class="menu-list-item">${options}</select><label for="${this.ID}"></label></div>${title}`;
el.style.fontFamily = document.body.style.fontFamily;
el.style.fontSize = document.body.style.fontSize;
el.style.fontVariant = document.body.style.fontVariant;
this.container.appendChild(el);
el.addEventListener('change', (evt) => {
if (callback) callback(items[evt.target.selectedIndex]);
});
}
async addRange(title, object, variable, min, max, step, callback) {
const el = document.createElement('div');
el.className = 'menu-item';

View File

@ -126,17 +126,33 @@ class Human {
}
}
async resetBackend(backendName) {
if (tf.getBackend() !== this.config.backend) {
this.state = 'backend';
if (backendName in tf.engine().registry) {
this.log('Human library setting backend:', this.config.backend);
this.config.backend = backendName;
const backendFactory = tf.findBackendFactory(backendName);
tf.removeBackend(backendName);
tf.registerBackend(backendName, backendFactory);
await tf.setBackend(backendName);
await tf.ready();
} else {
this.log('Human library backend not registered:', backendName);
}
}
}
tfImage(input) {
// let imageData;
let filtered;
const originalWidth = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));
const originalHeight = input.naturalHeight || input.videoHeight || input.height || (input.shape && (input.shape[2] > 0));
let targetWidth = originalWidth;
let targetHeight = originalHeight;
if (this.fx && this.config.filter.enabled && !(input instanceof tf.Tensor)) {
const originalWidth = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));
const originalHeight = input.naturalHeight || input.videoHeight || input.height || (input.shape && (input.shape[2] > 0));
let targetWidth = originalWidth;
if (this.config.filter.width > 0) targetWidth = this.config.filter.width;
else if (this.config.filter.height > 0) targetWidth = originalWidth * (this.config.filter.height / originalHeight);
let targetHeight = originalHeight;
if (this.config.filter.height > 0) targetHeight = this.config.filter.height;
else if (this.config.filter.width > 0) targetHeight = originalHeight * (this.config.filter.width / originalWidth);
const offscreenCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');
@ -166,7 +182,20 @@ class Human {
if (input instanceof tf.Tensor) {
tensor = tf.clone(input);
} else {
const pixels = tf.browser.fromPixels(filtered || input);
const canvas = filtered || input;
let pixels;
// webgl has a kernel-optimized fromPixels; also, if input is already ImageData, use it directly
if ((this.config.backend === 'webgl') || (canvas instanceof ImageData)) pixels = tf.browser.fromPixels(canvas);
// cpu and wasm kernels do not implement an efficient fromPixels method and cannot use the canvas as-is, so draw it onto one more temporary canvas first
else {
const tempCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');
tempCanvas.width = targetWidth;
tempCanvas.height = targetHeight;
const tempCtx = tempCanvas.getContext('2d');
tempCtx.drawImage(canvas, 0, 0);
const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
pixels = tf.browser.fromPixels(data);
}
const casted = pixels.toFloat();
tensor = casted.expandDims(0);
pixels.dispose();
@ -197,12 +226,7 @@ class Human {
// configure backend
timeStamp = now();
if (tf.getBackend() !== this.config.backend) {
this.state = 'backend';
this.log('Human library setting backend:', this.config.backend);
await tf.setBackend(this.config.backend);
await tf.ready();
}
await this.resetBackend(this.config.backend);
perf.backend = Math.trunc(now() - timeStamp);
// check number of loaded models