diff --git a/demo/browser.js b/demo/browser.js
index 9092b2c4..78b1d690 100644
--- a/demo/browser.js
+++ b/demo/browser.js
@@ -4,6 +4,8 @@ import Menu from './menu.js';
const human = new Human();
+const userConfig = {}; // add any user configuration overrides
+
// ui options
const ui = {
baseColor: 'rgba(173, 216, 230, 0.3)', // 'lightblue' with light alpha channel
@@ -220,9 +222,9 @@ function runHumanDetect(input, canvas) {
ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
const data = ctx.getImageData(0, 0, canvas.width, canvas.height);
// perform detection in worker
- webWorker(input, data, canvas);
+ webWorker(input, data, canvas, userConfig);
} else {
- human.detect(input).then((result) => {
+ human.detect(input, userConfig).then((result) => {
if (result.error) log(result.error);
else drawResults(input, result, canvas);
});
@@ -241,7 +243,7 @@ async function processImage(input) {
image.height = image.naturalHeight;
canvas.width = human.config.filter.width && human.config.filter.width > 0 ? human.config.filter.width : image.naturalWidth;
canvas.height = human.config.filter.height && human.config.filter.height > 0 ? human.config.filter.height : image.naturalHeight;
- const result = await human.detect(image);
+ const result = await human.detect(image, userConfig);
drawResults(image, result, canvas);
const thumb = document.createElement('canvas');
thumb.className = 'thumbnail';
@@ -383,15 +385,16 @@ async function main() {
log('Human: demo starting ...');
setupMenu();
document.getElementById('log').innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`;
- // this is not required, just pre-warms the library
+ // this is not required, just pre-loads all models
if (ui.modelsPreload) {
status('loading');
- await human.load();
+ await human.load(userConfig);
}
+ // this is not required, just pre-warms all models for faster initial inference
if (ui.modelsWarmup) {
status('initializing');
- const warmup = new ImageData(50, 50);
- await human.detect(warmup);
+ const warmup = new ImageData(256, 256);
+ await human.detect(warmup, userConfig);
}
status('human: ready');
document.getElementById('loader').style.display = 'none';
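
Note: a minimal sketch of how the new userConfig hook might be used from the demo; the specific keys shown (backend, filter.enabled) are illustrative assumptions based on the library's default configuration, not values this patch sets.

  // demo-level overrides: merged into defaults by the Human constructor and
  // applied again on every detect() call, as wired in the hunks above
  const userConfig = {
    backend: 'webgl',           // assumed key: preferred TF.js backend
    filter: { enabled: false }, // assumed key: skip the image pre-processing filter
  };
  const human = new Human(userConfig);

  async function detectFrame(video, canvas) {
    const result = await human.detect(video, userConfig); // per-call overrides
    drawResults(video, result, canvas);
  }
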
diff --git a/demo/index.html b/demo/index.html
index 28698af2..b45f2bf5 100644
--- a/demo/index.html
+++ b/demo/index.html
@@ -15,7 +15,7 @@
-
+
diff --git a/demo/worker.js b/demo/worker.js
index e796631e..8b62675d 100644
--- a/demo/worker.js
+++ b/demo/worker.js
@@ -16,7 +16,7 @@ onmessage = async (msg) => {
const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
let result = {};
try {
- result = await human.detect(image);
+ result = await human.detect(image, msg.data.userConfig);
} catch (err) {
result.error = err.message;
log('worker thread error:', err.message);
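
Note: the worker now expects userConfig inside the posted message. Below is a sketch of the sending side, assuming the webWorker() helper in browser.js transfers the raw pixel buffer and that the worker posts its result back; the image/width/height/userConfig field names mirror what worker.js reads from msg.data above, everything else is illustrative.

  let worker;
  function webWorker(input, image, canvas, userConfig) {
    if (!worker) worker = new Worker('worker.js', { type: 'module' }); // module worker is an assumption
    worker.onmessage = (msg) => drawResults(input, msg.data, canvas);  // assumes the worker posts the result back
    // transfer the pixel buffer instead of copying it; field names match worker.js
    worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, userConfig }, [image.data.buffer]);
  }
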
diff --git a/src/human.js b/src/human.js
index b23fa91d..0a306bee 100644
--- a/src/human.js
+++ b/src/human.js
@@ -42,10 +42,10 @@ function mergeDeep(...objects) {
}
class Human {
- constructor() {
+ constructor(userConfig = {}) {
this.tf = tf;
this.version = app.version;
- this.config = defaults;
+ this.config = mergeDeep(defaults, userConfig);
this.fx = null;
this.state = 'idle';
this.numTensors = 0;
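
Note: mergeDeep itself is outside this hunk; for context, a recursive merge with the same call shape (later objects win, nested objects are merged, arrays are concatenated) would look roughly like the sketch below, which is illustrative rather than the library's exact implementation.

  function mergeDeep(...objects) {
    const isObject = (obj) => obj && typeof obj === 'object';
    return objects.reduce((prev, obj) => {
      Object.keys(obj || {}).forEach((key) => {
        const pVal = prev[key];
        const oVal = obj[key];
        if (Array.isArray(pVal) && Array.isArray(oVal)) prev[key] = pVal.concat(...oVal); // arrays: concatenate
        else if (isObject(pVal) && isObject(oVal)) prev[key] = mergeDeep(pVal, oVal);     // objects: recurse
        else prev[key] = oVal;                                                            // scalars: later value wins
      });
      return prev;
    }, {});
  }
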
@@ -152,7 +152,7 @@ class Human {
// check if backend needs initialization if it changed
async checkBackend(force) {
const timeStamp = now();
- if (force || (tf.getBackend() !== this.config.backend)) {
+    if (this.config.backend && (this.config.backend !== '') && (force || (tf.getBackend() !== this.config.backend))) {
this.state = 'backend';
/* force backend reload
if (this.config.backend in tf.engine().registry) {
@@ -167,16 +167,16 @@ class Human {
await tf.setBackend(this.config.backend);
tf.enableProdMode();
       /* debug mode is really too much
- if (this.config.profile) tf.enableDebugMode();
- else tf.enableProdMode();
+ tf.enableDebugMode();
*/
- if (this.config.deallocate && this.config.backend === 'webgl') {
- this.log('Changing WebGL: WEBGL_DELETE_TEXTURE_THRESHOLD:', this.config.deallocate);
- tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 0 : -1);
+ if (this.config.backend === 'webgl') {
+ if (this.config.deallocate) {
+ this.log('Changing WebGL: WEBGL_DELETE_TEXTURE_THRESHOLD:', this.config.deallocate);
+ tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 0 : -1);
+ }
+ // tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
+ tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
}
- // tf.ENV.set('WEBGL_CPU_FORWARD', true);
- // tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
- tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
await tf.ready();
}
const current = Math.trunc(now() - timeStamp);
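
Note: combined with the constructor change, the backend can now be selected per Human instance. A brief usage sketch follows; the values shown are examples, not something this patch configures.

  const human = new Human({ backend: 'webgl', deallocate: true }); // merged into config by the constructor

  async function run(imageData) {
    // the first detect() call reaches checkBackend(): if tf.getBackend() !== config.backend it switches
    // backends, and with deallocate enabled on webgl it sets WEBGL_DELETE_TEXTURE_THRESHOLD to 0 as above
    return human.detect(imageData);
  }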