mirror of https://github.com/vladmandic/human
implemented input resizing
parent c731892cc5
commit fedbb1aac4

README.md (12 lines changed)
@@ -37,7 +37,7 @@ Detailed configuration options are explained below, but they are best seen in th
 ## Installation

 **Important**

-*The packaged (IIFE and ESM) version of `Human` includes `TensorFlow/JS (TFJS) 2.6.0` library which can be accessed via `human.tf`*
+*The packaged (IIFE and ESM) version of `Human` includes `TensorFlow/JS (TFJS) 2.7.0` library which can be accessed via `human.tf`*

 *You should NOT manually load another instance of `tfjs`, but if you do, be aware of possible version conflicts*

 There are multiple ways to use `Human` library, pick one that suits you:
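
As a quick illustration of the `human.tf` note above (a minimal sketch; the ESM import path is an assumption, adjust it to however you load `Human`):

```js
// assumes the ESM bundle of Human; the exact import path depends on your setup
import Human from '@vladmandic/human';

const human = new Human();
// the bundled TensorFlow/JS instance is exposed as `human.tf`,
// so there is no need to load a separate copy of tfjs
console.log('bundled TFJS version:', human.tf.version);
```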

@@ -253,6 +253,12 @@ config = {
   filter: { // note: image filters are only available in Browser environments and not in NodeJS as they require WebGL for processing
     enabled: true, // enable image pre-processing filters
     return: true, // return processed canvas imagedata in result
+    width: 0, // resize input width
+    height: 0, // resize input height
+    // useful on low-performance devices to reduce the size of processed input
+    // if both width and height are set to 0, there is no resizing
+    // if just one is set, second one is scaled automatically
+    // if both are set, values are used as-is
     brightness: 0, // range: -1 (darken) to 1 (lighten)
     contrast: 0, // range: -1 (reduce contrast) to 1 (increase contrast)
     sharpness: 0, // range: 0 (no sharpening) to 1 (maximum sharpening)
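
To make the resizing rules above concrete, here is a minimal usage sketch (illustrative only; the import path and the `videoElement` input are assumptions). With `width: 720` and `height: 0`, a 1920x1080 frame is scaled to 720x405 before detection, since 1080 × (720 / 1920) = 405:

```js
import Human from '@vladmandic/human'; // assumed import path for this sketch

const human = new Human();

const config = {
  filter: {
    enabled: true, // image filters (and resizing) run in the browser via WebGL
    width: 720,    // resize input to 720px wide
    height: 0,     // 0 = derive height automatically to preserve the aspect ratio
    return: true,  // include the processed (resized) canvas in the result
  },
};

async function run(videoElement) {
  const result = await human.detect(videoElement, config);
  // result.canvas holds the resized frame that was actually analyzed
  if (result.canvas) console.log(result.canvas.width, result.canvas.height); // e.g. 720 405
}
```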

@@ -436,7 +442,7 @@ Performance will vary depending on your hardware, but also on number of resoluti
 For example, it can perform multiple face detections at 60+ FPS, but drops to ~15 FPS on medium-complexity images if all modules are enabled

-### Performance per module on a **notebook** with nVidia GTX1050 GPU:
+### Performance per module on a **notebook** with nVidia GTX1050 GPU on a FullHD input:

 - Enabled all: 15 FPS
 - Image filters: 80 FPS (standalone)

@@ -449,7 +455,7 @@ For example, it can perform multiple face detections at 60+ FPS, but drops to ~1
 - Hand: 40 FPS (standalone)
 - Body: 50 FPS (standalone)

-### Performance per module on a **smartphone** with Snapdragon 855:
+### Performance per module on a **smartphone** with Snapdragon 855 on a FullHD input:

 - Enabled all: 3 FPS
 - Image filters: 30 FPS (standalone)

@@ -10,6 +10,11 @@ export default {
   videoOptimized: true, // perform additional optimizations when input is video, must be disabled for images
   filter: {
     enabled: true, // enable image pre-processing filters
+    width: 0, // resize input width
+    height: 0, // resize input height
+    // if both width and height are set to 0, there is no resizing
+    // if just one is set, second one is scaled automatically
+    // if both are set, values are used as-is
     return: true, // return processed canvas imagedata in result
     brightness: 0, // range: -1 (darken) to 1 (lighten)
     contrast: 0, // range: -1 (reduce contrast) to 1 (increase contrast)

@@ -7,6 +7,7 @@ const human = new Human();
 // ui options
 const ui = {
   baseColor: 'rgba(173, 216, 230, 0.3)', // this is 'lightblue', just with alpha channel
+  baseBackground: 'rgba(50, 50, 50, 1)', // dark grey with alpha channel, used to clear the output canvas
   baseLabel: 'rgba(173, 216, 230, 0.9)',
   baseFontProto: 'small-caps {size} "Segoe UI"',
   baseLineWidth: 16,

@@ -29,7 +30,23 @@ const ui = {
 // configuration overrides
 const config = {
   backend: 'webgl', // if you want to use 'wasm' backend, enable script load of tf and tf-backend-wasm in index.html
-  filter: { enabled: true, brightness: 0, contrast: 0, sharpness: 0, blur: 0, saturation: 0, hue: 0, negative: false, sepia: false, vintage: false, kodachrome: false, technicolor: false, polaroid: false, pixelate: 0 },
+  filter: {
+    enabled: true,
+    width: 720,
+    height: 0,
+    brightness: 0,
+    contrast: 0,
+    sharpness: 0,
+    blur: 0,
+    saturation: 0,
+    hue: 0,
+    negative: false,
+    sepia: false,
+    vintage: false,
+    kodachrome: false,
+    technicolor: false,
+    polaroid: false,
+    pixelate: 0 },
   videoOptimized: true,
   face: {
     enabled: true,

@@ -80,7 +97,9 @@ function drawResults(input, result, canvas) {
   // draw image from video
   const ctx = canvas.getContext('2d');
-  if (result.canvas) ctx.drawImage(result.canvas, 0, 0, result.canvas.width, result.canvas.height, 0, 0, canvas.width, canvas.height);
+  ctx.fillStyle = ui.baseBackground;
+  ctx.fillRect(0, 0, canvas.width, canvas.height);
+  if (result.canvas) ctx.drawImage(result.canvas, 0, 0, result.canvas.width, result.canvas.height, 0, 0, result.canvas.width, result.canvas.height);
   else ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
   // draw all results
   draw.face(result.face, canvas, ui, human.facemesh.triangulation);

@@ -130,8 +149,8 @@ async function setupCamera() {
   video.onloadeddata = async () => {
     video.width = video.videoWidth;
     video.height = video.videoHeight;
-    canvas.width = video.videoWidth;
-    canvas.height = video.videoHeight;
+    canvas.width = video.width;
+    canvas.height = video.height;
     if (live) video.play();
     ui.busy = false;
     // do once more because onresize events can be delayed or skipped

@@ -194,8 +213,8 @@ async function processImage(input) {
   const canvas = document.getElementById('canvas');
   image.width = image.naturalWidth;
   image.height = image.naturalHeight;
-  canvas.width = image.naturalWidth;
-  canvas.height = image.naturalHeight;
+  canvas.width = config.filter.width && config.filter.width > 0 ? config.filter.width : image.naturalWidth;
+  canvas.height = config.filter.height && config.filter.height > 0 ? config.filter.height : image.naturalHeight;
   const result = await human.detect(image, config);
   drawResults(image, result, canvas);
   const thumb = document.createElement('canvas');

@@ -302,6 +321,8 @@ function setupMenu() {
   menuFX.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
   menuFX.addLabel('Image Filters');
   menuFX.addBool('Enabled', config.filter, 'enabled');
+  menuFX.addRange('Image width', config.filter, 'width', 100, 3840, 10, (val) => config.filter.width = parseInt(val));
+  menuFX.addRange('Image height', config.filter, 'height', 100, 2160, 10, (val) => config.filter.height = parseInt(val));
   menuFX.addRange('Brightness', config.filter, 'brightness', -1.0, 1.0, 0.05, (val) => config.filter.brightness = parseFloat(val));
   menuFX.addRange('Contrast', config.filter, 'contrast', -1.0, 1.0, 0.05, (val) => config.filter.contrast = parseFloat(val));
   menuFX.addRange('Sharpness', config.filter, 'sharpness', 0, 1.0, 0.05, (val) => config.filter.sharpness = parseFloat(val));

package.json (14 lines changed)

@@ -22,19 +22,19 @@
   "dependencies": {},
   "peerDependencies": {},
   "devDependencies": {
-    "@tensorflow/tfjs": "^2.6.0",
-    "@tensorflow/tfjs-node": "^2.6.0",
+    "@tensorflow/tfjs": "^2.7.0",
+    "@tensorflow/tfjs-node": "^2.7.0",
     "@vladmandic/pilogger": "^0.2.6",
-    "dayjs": "^1.9.3",
-    "simple-git": "^2.21.0",
-    "esbuild": "^0.7.15",
-    "eslint": "^7.10.0",
+    "dayjs": "^1.9.4",
+    "esbuild": "^0.7.21",
+    "eslint": "^7.12.1",
     "eslint-config-airbnb-base": "^14.2.0",
     "eslint-plugin-import": "^2.22.1",
     "eslint-plugin-json": "^2.1.2",
     "eslint-plugin-node": "^11.1.0",
     "eslint-plugin-promise": "^4.2.1",
-    "rimraf": "^3.0.2"
+    "rimraf": "^3.0.2",
+    "simple-git": "^2.21.0"
   },
   "scripts": {
     "start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",

src/human.js (16 lines changed)

@@ -130,12 +130,20 @@ class Human {
     // let imageData;
     let filtered;
     if (this.fx && this.config.filter.enabled && !(input instanceof tf.Tensor)) {
-      const width = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));
-      const height = input.naturalHeight || input.videoHeight || input.height || (input.shape && (input.shape[2] > 0));
-      const offscreenCanvas = new OffscreenCanvas(width, height);
+      const originalWidth = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));
+      const originalHeight = input.naturalHeight || input.videoHeight || input.height || (input.shape && (input.shape[2] > 0));
+
+      let targetWidth = originalWidth;
+      if (this.config.filter.width > 0) targetWidth = this.config.filter.width;
+      else if (this.config.filter.height > 0) targetWidth = originalWidth * (this.config.filter.height / originalHeight);
+      let targetHeight = originalHeight;
+      if (this.config.filter.height > 0) targetHeight = this.config.filter.height;
+      else if (this.config.filter.width > 0) targetHeight = originalHeight * (this.config.filter.width / originalWidth);
+
+      const offscreenCanvas = new OffscreenCanvas(targetWidth, targetHeight);
       const ctx = offscreenCanvas.getContext('2d');
       if (input instanceof ImageData) ctx.putImageData(input, 0, 0);
-      else ctx.drawImage(input, 0, 0, width, height, 0, 0, offscreenCanvas.width, offscreenCanvas.height);
+      else ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, offscreenCanvas.width, offscreenCanvas.height);
       this.fx.reset();
      this.fx.addFilter('brightness', this.config.filter.brightness); // must have at least one filter enabled
       if (this.config.filter.contrast !== 0) this.fx.addFilter('contrast', this.config.filter.contrast);
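
The aspect-ratio rule introduced above can be read as a small standalone helper (a sketch for illustration only; `computeTargetSize` is a hypothetical function, not part of the library):

```js
// hypothetical helper mirroring the resize logic above
function computeTargetSize(originalWidth, originalHeight, filter) {
  let targetWidth = originalWidth;
  if (filter.width > 0) targetWidth = filter.width;
  else if (filter.height > 0) targetWidth = originalWidth * (filter.height / originalHeight);
  let targetHeight = originalHeight;
  if (filter.height > 0) targetHeight = filter.height;
  else if (filter.width > 0) targetHeight = originalHeight * (filter.width / originalWidth);
  return { targetWidth, targetHeight };
}

console.log(computeTargetSize(1920, 1080, { width: 720, height: 0 }));   // { targetWidth: 720, targetHeight: 405 }
console.log(computeTargetSize(1920, 1080, { width: 0, height: 0 }));     // { targetWidth: 1920, targetHeight: 1080 } (no resizing)
console.log(computeTargetSize(1920, 1080, { width: 640, height: 640 })); // { targetWidth: 640, targetHeight: 640 } (used as-is)
```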