enhanced processing resolution

pull/293/head
Vladimir Mandic 2020-11-03 10:55:33 -05:00
parent de5d299eee
commit 0153841891
3 changed files with 43 additions and 31 deletions


@@ -69,6 +69,7 @@ let menu;
 let menuFX;
 let worker;
 let timeStamp;
+let camera = {};
 const fps = [];
 // helper function: translates json to human readable string
@@ -111,19 +112,26 @@ function drawResults(input, result, canvas) {
   const ctx = canvas.getContext('2d');
   ctx.fillStyle = ui.baseBackground;
   ctx.fillRect(0, 0, canvas.width, canvas.height);
-  if (result.canvas) ctx.drawImage(result.canvas, 0, 0, result.canvas.width, result.canvas.height, 0, 0, result.canvas.width, result.canvas.height);
-  else ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
+  if (result.canvas) {
+    if (result.canvas.width !== canvas.width) canvas.width = result.canvas.width;
+    if (result.canvas.height !== canvas.height) canvas.height = result.canvas.height;
+    ctx.drawImage(result.canvas, 0, 0, result.canvas.width, result.canvas.height, 0, 0, result.canvas.width, result.canvas.height);
+  } else {
+    ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
+  }
   // draw all results
   draw.face(result.face, canvas, ui, human.facemesh.triangulation);
   draw.body(result.body, canvas, ui);
   draw.hand(result.hand, canvas, ui);
   // update log
   const engine = human.tf.engine();
-  const memory = `${engine.state.numBytes.toLocaleString()} bytes ${engine.state.numDataBuffers.toLocaleString()} buffers ${engine.state.numTensors.toLocaleString()} tensors`;
-  const gpu = engine.backendInstance ? `GPU: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : '';
+  const gpu = engine.backendInstance ? `gpu: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : '';
+  const memory = `system: ${engine.state.numBytes.toLocaleString()} bytes ${gpu} tensors: ${engine.state.numTensors.toLocaleString()}`;
+  const processing = result.canvas ? `processing: ${result.canvas.width} x ${result.canvas.height}` : '';
   document.getElementById('log').innerText = `
-    TFJS Version: ${human.tf.version_core} | Backend: ${human.tf.getBackend()} | Memory: ${memory} ${gpu}
-    Performance: ${str(result.performance)} | Object size: ${(str(result)).length.toLocaleString()} bytes
+    video: ${camera.name} facing: ${camera.facing} resolution: ${camera.width} x ${camera.height} ${processing}
+    backend: ${human.tf.getBackend()} | ${memory} | object size: ${(str(result)).length.toLocaleString()} bytes
+    performance: ${str(result.performance)}
   `;
 }
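Note on the drawResults change above: the output canvas is now resized to match the processed canvas returned by the library (for example after the image filter rescales the input), so the frame is copied 1:1 and overlay coordinates line up. A minimal standalone sketch of the same pattern (function and variable names here are illustrative, not part of the demo):

// keep a display canvas the same size as a processed source before drawing
// `processed` stands in for result.canvas from the diff above
function drawProcessed(processed, canvas) {
  if (canvas.width !== processed.width) canvas.width = processed.width;
  if (canvas.height !== processed.height) canvas.height = processed.height;
  const ctx = canvas.getContext('2d');
  ctx.drawImage(processed, 0, 0); // 1:1 copy, no scaling, so overlays match pixel-for-pixel
}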
@@ -151,7 +159,7 @@ async function setupCamera() {
   try {
     stream = await navigator.mediaDevices.getUserMedia({
       audio: false,
-      video: { facingMode: (ui.facing ? 'user' : 'environment'), width: window.innerWidth, height: window.innerHeight },
+      video: { facingMode: (ui.facing ? 'user' : 'environment'), width: window.innerWidth, height: window.innerHeight, resizeMode: 'none' },
     });
   } catch (err) {
     output.innerText += '\nCamera permission denied';
@@ -160,6 +168,10 @@ async function setupCamera() {
   }
   if (stream) video.srcObject = stream;
   else return null;
+  const track = stream.getVideoTracks()[0];
+  const settings = track.getSettings();
+  log('camera settings:', settings);
+  camera = { name: track.label, width: settings.width, height: settings.height, facing: settings.facingMode === 'user' ? 'front' : 'back' };
   return new Promise((resolve) => {
     video.onloadeddata = async () => {
       video.width = video.videoWidth;
@@ -169,8 +181,7 @@ async function setupCamera() {
       if (live) video.play();
       ui.busy = false;
       // do once more because onresize events can be delayed or skipped
-      if (video.width > window.innerWidth) await setupCamera();
-      output.innerText += `\nCamera resolution: ${video.width} x ${video.height}`;
+      // if (video.width > window.innerWidth) await setupCamera();
       resolve(video);
     };
   });
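Note on the setupCamera changes above: the camera details shown in the log now come from the MediaStreamTrack itself rather than from the video element, so they reflect the resolution the browser actually negotiated after getUserMedia. A standalone sketch of that lookup (variable names are illustrative; assumes `stream` came from getUserMedia as in the diff):

// read the negotiated capture settings from the first video track
const track = stream.getVideoTracks()[0];
const { width, height, facingMode, frameRate } = track.getSettings();
console.log(`camera: ${track.label} ${width}x${height} facing=${facingMode} fps=${frameRate}`);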
@@ -350,8 +361,8 @@ function setupMenu() {
   menuFX.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
   menuFX.addLabel('Image Filters');
   menuFX.addBool('Enabled', config.filter, 'enabled');
-  menuFX.addRange('Image width', config.filter, 'width', 100, 3840, 10, (val) => config.filter.width = parseInt(val));
-  menuFX.addRange('Image height', config.filter, 'height', 100, 2160, 10, (val) => config.filter.height = parseInt(val));
+  menuFX.addRange('Image width', config.filter, 'width', 0, 3840, 10, (val) => config.filter.width = parseInt(val));
+  menuFX.addRange('Image height', config.filter, 'height', 0, 2160, 10, (val) => config.filter.height = parseInt(val));
   menuFX.addRange('Brightness', config.filter, 'brightness', -1.0, 1.0, 0.05, (val) => config.filter.brightness = parseFloat(val));
   menuFX.addRange('Contrast', config.filter, 'contrast', -1.0, 1.0, 0.05, (val) => config.filter.contrast = parseFloat(val));
   menuFX.addRange('Sharpness', config.filter, 'sharpness', 0, 1.0, 0.05, (val) => config.filter.sharpness = parseFloat(val));


@@ -4,7 +4,7 @@ const profile = require('../profile.js');
 const annotations = ['angry', 'discust', 'fear', 'happy', 'sad', 'surpise', 'neutral'];
 const models = {};
 let last = [];
-let frame = 0;
+let frame = Number.MAX_SAFE_INTEGER;
 const multiplier = 1.5;
 async function load(config) {
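Note on the frame-counter change above: initializing the counter to Number.MAX_SAFE_INTEGER makes the skip-frame cache miss on the very first call, so detection always runs once before any cached result is returned. A minimal sketch of that caching pattern, assuming a skipFrames setting like the one used elsewhere in the library (runModel is a stand-in for the real inference call):

// skip-frame cache: reuse the last result for up to skipFrames calls
let last = [];
let frame = Number.MAX_SAFE_INTEGER; // force a real run on the first call

async function predict(image, config) {
  if (frame < config.skipFrames) {
    frame += 1;
    return last;                        // cheap path: return cached result
  }
  frame = 0;                            // reset counter and run the model
  last = await runModel(image, config); // stand-in for actual inference
  return last;
}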


@@ -9,8 +9,6 @@ const profile = require('./profile.js');
 const defaults = require('../config.js').default;
 const app = require('../package.json');
-let first = true;
 // static config override for non-video detection
 const override = {
   face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, emotion: { skipFrames: 0 } },
@@ -42,19 +40,6 @@ function mergeDeep(...objects) {
   }, {});
 }
-function sanity(input) {
-  if (!input) return 'input is not defined';
-  if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {
-    return 'input must be a tensor';
-  }
-  try {
-    tf.getBackend();
-  } catch {
-    return 'backend not loaded';
-  }
-  return null;
-}
 class Human {
   constructor() {
     this.tf = tf;
@@ -65,6 +50,8 @@ class Human {
     this.state = 'idle';
     this.numTensors = 0;
     this.analyzeMemoryLeaks = false;
+    this.checkSanity = false;
+    this.firstRun = true;
     // internal temp canvases
     this.inCanvas = null;
     this.outCanvas = null;
@@ -107,14 +94,28 @@ class Human {
     if (leaked !== 0) this.log(...msg, leaked);
   }
+  sanity(input) {
+    if (!this.checkSanity) return null;
+    if (!input) return 'input is not defined';
+    if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {
+      return 'input must be a tensor';
+    }
+    try {
+      tf.getBackend();
+    } catch {
+      return 'backend not loaded';
+    }
+    return null;
+  }
   async load(userConfig) {
     if (userConfig) this.config = mergeDeep(defaults, userConfig);
-    if (first) {
+    if (this.firstRun) {
       this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);
       this.log('configuration:', this.config);
       this.log('flags:', tf.ENV.flags);
-      first = false;
+      this.firstRun = false;
     }
     if (this.config.face.enabled && !this.models.facemesh) {
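Note on the changes above: sanity() is now an instance method gated by this.checkSanity (off by default), so callers opt in to the extra input validation, and the one-time startup logging is tracked per instance via this.firstRun instead of a module-level flag. A usage sketch, assuming the class is constructed directly (the actual export shape may differ; `Human` and `myImage` are placeholders):

// opt in to input sanity checks before running detection
const human = new Human();
human.checkSanity = true;               // enable the otherwise-skipped input validation
const result = await human.detect(myImage);
if (result.error) console.error(result.error); // detect() returns { error } when sanity() fails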
@@ -183,7 +184,7 @@ class Human {
     else if (this.config.filter.height > 0) targetWidth = originalWidth * (this.config.filter.height / originalHeight);
     if (this.config.filter.height > 0) targetHeight = this.config.filter.height;
     else if (this.config.filter.width > 0) targetHeight = originalHeight * (this.config.filter.width / originalWidth);
-    if (!this.inCanvas || (this.inCanvas.width !== originalWidth) || (this.inCanvas.height !== originalHeight)) {
+    if (!this.inCanvas || (this.inCanvas.width !== targetWidth) || (this.inCanvas.height !== targetHeight)) {
       this.inCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');
       if (this.inCanvas.width !== targetWidth) this.inCanvas.width = targetWidth;
       if (this.inCanvas.height !== targetHeight) this.inCanvas.height = targetHeight;
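Note on the inCanvas change above: comparing the cached canvas against the target (post-filter) dimensions rather than the original input dimensions means the canvas is only recreated when the processing resolution actually changes; with the old check, any filter-driven resize made the comparison fail on every frame. A standalone sketch of the lazy-recreation pattern (names are illustrative):

// lazily (re)create a work canvas only when the desired size changes
let workCanvas = null;
function getWorkCanvas(targetWidth, targetHeight) {
  if (!workCanvas || workCanvas.width !== targetWidth || workCanvas.height !== targetHeight) {
    workCanvas = (typeof OffscreenCanvas !== 'undefined')
      ? new OffscreenCanvas(targetWidth, targetHeight)
      : Object.assign(document.createElement('canvas'), { width: targetWidth, height: targetHeight });
  }
  return workCanvas;
}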
@@ -248,7 +249,7 @@ class Human {
     // sanity checks
     this.state = 'check';
-    const error = sanity(input);
+    const error = this.sanity(input);
     if (error) {
       this.log(error, input);
       return { error };