mirror of https://github.com/vladmandic/human

commit dfc67f0188 (parent 0d523c3744): compatibility notes

README.md | 12

@@ -229,7 +229,8 @@ config = {
   scoped: false,         // enable scoped runs
                          // some models *may* have memory leaks, this wraps everything in a local scope at a cost of performance
                          // typically not needed
-  filter: {
+  videoOptimized: true,  // perform additional optimizations when input is video, must be disabled for images
+  filter: {              // note: image filters are only available in Browser environments and not in NodeJS as they require WebGL for processing
     enabled: true,       // enable image pre-processing filters
     return: true,        // return processed canvas imagedata in result
     brightness: 0,       // range: -1 (darken) to 1 (lighten)
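The two added lines work together: `videoOptimized` is a new top-level switch and `filter` is Browser-only. A minimal usage sketch, assuming the `human` ESM build is imported the same way the demo code later in this commit does, and run inside an async context:

```js
// sketch only: exercise the two new config keys from this hunk
import human from '../dist/human.esm.js';

async function run(videoElement, imageElement) {
  // live video: per-frame optimizations stay on (the default)
  const videoResult = await human.detect(videoElement, { videoOptimized: true });
  // still image: the new comment says videoOptimized "must be disabled for images"
  const imageResult = await human.detect(imageElement, { videoOptimized: false });
  return { videoResult, imageResult };
}
```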
@@ -432,6 +433,15 @@ For performance details, see output of `result.performance` object during runtime
 
 <hr>
 
+## Limitations
+
+`Human` library can be used in any modern Browser or NodeJS environment, but there are several items to be aware of:
+
+- **NodeJS**: Due to a missing feature in `tfjs-node`, only some models are available <https://github.com/tensorflow/tfjs/issues/4066>
+- **Browser**: `filters` module cannot be used when using web workers <https://github.com/phoboslab/WebGLImageFilter/issues/27>
+
+<hr>
+
 ## Credits
 
 - Face Detection: [**MediaPipe BlazeFace**](https://drive.google.com/file/d/1f39lSzU5Oq-j_OXgS67KfN5wNsoeAZ4V/view)
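A hedged sketch of what the NodeJS limitation means in practice: inputs must already be tensors (the `sanity()` check in `src/human.js` below enforces this), and the WebGL-backed filters stay off. The `require` specifier for a node build of `human` is an assumption, not something this commit defines:

```js
// NodeJS sketch; assumes @tensorflow/tfjs-node is installed and a node build of human exists
const tf = require('@tensorflow/tfjs-node');
const fs = require('fs');
const human = require('@vladmandic/human'); // assumed package name, not part of this diff

async function detectFromFile(path) {
  const buffer = fs.readFileSync(path);         // raw encoded image bytes
  const tensor = tf.node.decodeImage(buffer);   // tf.Tensor3D [height, width, channels]
  const result = await human.detect(tensor, {
    videoOptimized: false,                      // still image, per the README note above
    filter: { enabled: false },                 // filters need WebGL, unavailable in NodeJS
  });
  tensor.dispose();                             // release tensor memory
  return result;
}
```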
@@ -7,6 +7,7 @@ export default {
   scoped: false,         // enable scoped runs
                          // some models *may* have memory leaks, this wraps everything in a local scope at a cost of performance
                          // typically not needed
+  videoOptimized: true,  // perform additional optimizations when input is video, must be disabled for images
   filter: {
     enabled: true,       // enable image pre-processing filters
     return: true,        // return processed canvas imagedata in result
@@ -28,6 +28,7 @@ const ui = {
 const config = {
   backend: 'webgl', // if you want to use 'wasm' backend, enable script load of tf and tf-backend-wasm in index.html
   filter: { enabled: true, brightness: 0, contrast: 0, sharpness: 0, blur: 0, saturation: 0, hue: 0, negative: false, sepia: false, vintage: false, kodachrome: false, technicolor: false, polaroid: false, pixelate: 0 },
+  videoOptimized: true,
   face: {
     enabled: true,
     detector: { maxFaces: 10, skipFrames: 10, minConfidence: 0.5, iouThreshold: 0.3, scoreThreshold: 0.7 },
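The `backend` comment above describes the WASM opt-in; a sketch of the runtime side of that switch, assuming `index.html` script-loads both `tf` and `tf-backend-wasm` as globals exactly as the comment says:

```js
// sketch: select the backend once, before the first call to human.detect()
async function initBackend(config) {
  // the wasm backend registers itself only if tf-backend-wasm.min.js was script-loaded
  await tf.setBackend(config.backend === 'wasm' ? 'wasm' : 'webgl');
  await tf.ready(); // resolves when the chosen backend has finished initializing
}
```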
@@ -1,6 +1,7 @@
 import human from '../dist/human.esm.js';
 
 let config;
+let busy = false;
 
 const log = (...msg) => {
   // eslint-disable-next-line no-console

@@ -8,6 +9,8 @@ const log = (...msg) => {
 };
 
 onmessage = async (msg) => {
+  if (busy) return;
+  busy = true;
   // worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);
   const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
   config = msg.data.config;

@@ -19,4 +22,5 @@ onmessage = async (msg) => {
     log('Worker thread error:', err.message);
   }
   postMessage(result);
+  busy = false;
 };
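The commented `worker.postMessage` line documents the exact message shape the main thread sends, and the new `busy` flag makes the worker drop any frame that arrives while a detection is still running. A main-thread sketch of that contract (the worker file name and canvas wiring are assumptions):

```js
// main-thread sketch: post one frame; frames arriving while the worker is busy are dropped
const worker = new Worker('worker.js', { type: 'module' }); // file name is an assumption

function postFrame(canvas, config) {
  const ctx = canvas.getContext('2d');
  const image = ctx.getImageData(0, 0, canvas.width, canvas.height);
  // same shape as the commented line inside the worker; the pixel buffer is transferred, not copied
  worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);
}

worker.onmessage = (msg) => {
  console.log('detection result', msg.data); // whatever the worker passed to postMessage(result)
};
```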
src/human.js | 22

@@ -74,15 +74,6 @@ function mergeDeep(...objects) {
 
 function sanity(input) {
   if (!input) return 'input is not defined';
-  if (!(input instanceof tf.Tensor)
-    || (tf.ENV.flags.IS_BROWSER
-    && (input instanceof ImageData || input instanceof HTMLImageElement || input instanceof HTMLCanvasElement || input instanceof HTMLVideoElement || input instanceof HTMLMediaElement))) {
-    const width = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));
-    if (!width || (width === 0)) return 'input is empty';
-  }
-  if (tf.ENV.flags.IS_BROWSER && (input instanceof HTMLVideoElement || input instanceof HTMLMediaElement)) {
-    if (input.readyState && (input.readyState <= 2)) return 'input is not ready';
-  }
   if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {
     return 'input must be a tensor';
   }
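The deleted block includes the guard that returned 'input is not ready' for video elements without enough buffered data. A caller that still wants that protection after this change can replicate it; a browser-side sketch (assumes a `human` instance in scope):

```js
// sketch: reinstate the removed readiness guard on the caller side
async function detectIfReady(video, config) {
  // readyState <= 2 means at most HAVE_CURRENT_DATA: no frame safely decodable yet
  if (video.readyState <= 2) return null;
  return human.detect(video, config);
}
```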
@@ -127,15 +118,18 @@ function tfImage(input) {
   let filtered;
   if (tf.ENV.flags.IS_BROWSER && config.filter.enabled && !(input instanceof tf.Tensor)) {
     const width = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));
-    const height = input.naturalHeight || input.videoHeight || input.Height || (input.shape && (input.shape[2] > 0));
+    const height = input.naturalHeight || input.videoHeight || input.height || (input.shape && (input.shape[2] > 0));
-    // if (!offscreenCanvas) offscreenCanvas = new OffscreenCanvas(width, height);
+    if (!offscreenCanvas) offscreenCanvas = new OffscreenCanvas(width, height);
+    /*
     if (!offscreenCanvas) {
       offscreenCanvas = document.createElement('canvas');
       offscreenCanvas.width = width;
       offscreenCanvas.height = height;
     }
+    */
     const ctx = offscreenCanvas.getContext('2d');
-    ctx.drawImage(input, 0, 0, width, height, 0, 0, offscreenCanvas.width, offscreenCanvas.height);
+    if (input instanceof ImageData) ctx.putImageData(input, 0, 0);
+    else ctx.drawImage(input, 0, 0, width, height, 0, 0, offscreenCanvas.width, offscreenCanvas.height);
     if (!fx) fx = new fxImage.Canvas();
     else fx.reset();
     fx.addFilter('brightness', config.filter.brightness); // must have at least one filter enabled
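Two fixes share this hunk: the `input.Height` typo in the height probe, and the draw path. `drawImage()` accepts a `CanvasImageSource` (image, video, canvas, ImageBitmap) but throws on `ImageData`, which is exactly what the web-worker demo reconstructs from the transferred buffer, so that type must go through `putImageData()`. A standalone sketch of the distinction:

```js
// sketch: route raw pixel buffers and drawable sources to the correct canvas call
function drawToCanvas(ctx, input, width, height) {
  if (input instanceof ImageData) {
    ctx.putImageData(input, 0, 0);               // raw RGBA buffer: drawImage() would throw
  } else {
    ctx.drawImage(input, 0, 0, width, height);   // img / video / canvas / ImageBitmap
  }
}
```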
@ -173,8 +167,8 @@ async function detect(input, userConfig = {}) {
|
||||||
let timeStamp;
|
let timeStamp;
|
||||||
|
|
||||||
timeStamp = now();
|
timeStamp = now();
|
||||||
const shouldOverride = tf.ENV.flags.IS_NODE || (tf.ENV.flags.IS_BROWSER && !((input instanceof HTMLVideoElement) || (input instanceof HTMLMediaElement)));
|
config = mergeDeep(defaults, userConfig);
|
||||||
config = mergeDeep(defaults, userConfig, shouldOverride ? override : {});
|
if (!config.videoOptimized) config = mergeDeep(config, override);
|
||||||
perf.config = Math.trunc(now() - timeStamp);
|
perf.config = Math.trunc(now() - timeStamp);
|
||||||
|
|
||||||
// sanity checks
|
// sanity checks
|
||||||
|
|
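This is the behavioral core of the commit: the module-level `override` config used to be forced by input-type sniffing (NodeJS, or any non-video input in the Browser); now it is applied only when the caller sets `videoOptimized: false`. A usage sketch (the contents of `override` are outside this hunk):

```js
// sketch: the caller now controls when detect() merges the override config
async function detectBoth(videoElement, imageElement, userConfig = {}) {
  // videoOptimized defaults to true -> override config is NOT merged
  const frameResult = await human.detect(videoElement, userConfig);
  // videoOptimized: false -> detect() merges in the module-level override
  const photoResult = await human.detect(imageElement, { ...userConfig, videoOptimized: false });
  return { frameResult, photoResult };
}
```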