new frame change detection algorithm

pull/233/head
Vladimir Mandic 2021-11-06 10:21:51 -04:00
parent 8de38b07eb
commit df933ccf8e
43 changed files with 79045 additions and 3376 deletions

View File

@ -126,7 +126,7 @@
"format": "iife",
"input": "src/human.ts",
"output": "dist/human.js",
"minify": true,
"minify": false,
"globalName": "Human",
"external": ["fs", "os", "buffer", "util"]
},

View File

@ -11,11 +11,10 @@
### **HEAD -> main** 2021/11/05 mandic00@live.com
- add histogram equalization
- implement wasm missing ops
- performance and memory optimizations
### **origin/main** 2021/11/04 mandic00@live.com
- fix react compatibility issues
- improve box rescaling for all modules
- improve precision using wasm backend
- refactor predict with execute

View File

@ -43,15 +43,18 @@ MoveNet MultiPose model does not work with WASM backend due to missing F32 broadcast ops
- Supports all modules on all backends
via custom implementation of missing kernel ops
- New frame change detection algorithm used for cache determination
based on temporal input difference
- New optional input histogram equalization
auto-level input for optimal brightness/contrast via `config.filter.equalization`
- Performance and precision improvements
**face** and **hand** modules
**face**, **hand** and **gestures** modules
- Use custom built TFJS for bundled version
reduced bundle size and built-in support for all backends
`nobundle` and `node` versions link to standard `@tensorflow` packages
- Add optional input histogram equalization
auto-level input for optimal brightness/contrast via `config.filter.equalization`
- Fix **ReactJS** compatibility
- Better precision using **WASM**
Previous issues were due to low-precision math in the WASM implementation
- Full **TS** type definitions for all modules and imports
- Focus on simplified demo
<https://vladmandic.github.io/human/demo/typescript/>

View File

@ -9,25 +9,28 @@
import Human from '../../dist/human.esm.js';
const userConfig = {
backend: 'wasm',
async: false,
backend: 'humangl',
async: true,
warmup: 'none',
cacheSensitivity: 0,
debug: true,
modelBasePath: '../../models/',
deallocate: true,
filter: {
enabled: true,
equalization: true,
},
face: {
enabled: true,
detector: { rotation: true, return: true, maxDetected: 50 },
mesh: { enabled: true },
embedding: { enabled: false },
iris: { enabled: true },
iris: { enabled: false },
emotion: { enabled: true },
description: { enabled: true },
},
hand: { enabled: false },
gesture: { enabled: true },
gesture: { enabled: false },
body: { enabled: false },
filter: { enabled: true },
segmentation: { enabled: false },
};
@ -73,9 +76,7 @@ async function SelectFaceCanvas(face) {
const squeeze = human.tf.squeeze(enhanced);
const normalize = human.tf.div(squeeze, 255);
await human.tf.browser.toPixels(normalize, c);
human.tf.dispose(enhanced);
human.tf.dispose(squeeze);
human.tf.dispose(normalize);
human.tf.dispose([enhanced, squeeze, normalize]);
const ctx = c.getContext('2d');
ctx.font = 'small-caps 0.4rem "Lato"';
ctx.fillStyle = 'rgba(255, 255, 255, 1)';
@ -134,7 +135,7 @@ async function SelectFaceCanvas(face) {
title('Selected Face');
}
async function AddFaceCanvas(index, res, fileName) {
function AddFaceCanvas(index, res, fileName) {
all[index] = res.face;
let ok = false;
for (const i in res.face) {
@ -161,7 +162,7 @@ async function AddFaceCanvas(index, res, fileName) {
});
// if we actually got face image tensor, draw canvas with that face
if (res.face[i].tensor) {
await human.tf.browser.toPixels(res.face[i].tensor, canvas);
human.tf.browser.toPixels(res.face[i].tensor, canvas);
document.getElementById('faces').appendChild(canvas);
const ctx = canvas.getContext('2d');
if (!ctx) return false;
@ -169,7 +170,7 @@ async function AddFaceCanvas(index, res, fileName) {
ctx.fillStyle = 'rgba(255, 255, 255, 1)';
ctx.fillText(`${res.face[i].age}y ${(100 * (res.face[i].genderScore || 0)).toFixed(1)}% ${res.face[i].gender}`, 4, canvas.height - 6);
const arr = db.map((rec) => rec.embedding);
const result = await human.match(res.face[i].embedding, arr);
const result = human.match(res.face[i].embedding, arr);
ctx.font = 'small-caps 1rem "Lato"';
if (result.similarity && res.similarity > minScore) ctx.fillText(`${(100 * result.similarity).toFixed(1)}% ${db[result.index].name}`, 4, canvas.height - 30);
}
@ -184,7 +185,7 @@ async function AddImageElement(index, image, length) {
const img = new Image(128, 128);
img.onload = () => { // must wait until image is loaded
human.detect(img, userConfig).then(async (res) => {
const ok = await AddFaceCanvas(index, res, image); // then wait until image is analyzed
const ok = AddFaceCanvas(index, res, image); // then wait until image is analyzed
// log('Add image:', index + 1, image, 'faces:', res.face.length);
if (ok) document.getElementById('images').appendChild(img); // and finally we can add it
resolve(true);
@ -199,7 +200,7 @@ async function AddImageElement(index, image, length) {
});
}
async function createFaceMatchDB() {
function createFaceMatchDB() {
log('Creating Faces DB...');
for (const image of all) {
for (const face of image) db.push({ name: 'unknown', source: face.fileName, embedding: face.embedding });
@ -246,6 +247,9 @@ async function main() {
// images = ['/samples/in/solvay1927.jpg'];
// download and analyze all images
// const promises = [];
// for (let i = 0; i < images.length; i++) promises.push(AddImageElement(i, images[i], images.length));
// await Promise.all(promises);
for (let i = 0; i < images.length; i++) await AddImageElement(i, images[i], images.length);
// print stats
@ -254,7 +258,7 @@ async function main() {
log(human.tf.engine().memory());
// if we didn't download db, generate it from current faces
if (!db || db.length === 0) await createFaceMatchDB();
if (!db || db.length === 0) createFaceMatchDB();
title('');
log('Ready');

View File

@ -712,6 +712,7 @@ function setupMenu() {
menu.image = new Menu(document.body, '', { top, left: x[1] });
menu.image.addBool('enabled', userConfig.filter, 'enabled', (val) => userConfig.filter.enabled = val);
menu.image.addBool('histogram equalization', userConfig.filter, 'equalization', (val) => userConfig.filter.equalization = val);
ui.menuWidth = menu.image.addRange('image width', userConfig.filter, 'width', 0, 3840, 10, (val) => userConfig.filter.width = parseInt(val));
ui.menuHeight = menu.image.addRange('image height', userConfig.filter, 'height', 0, 2160, 10, (val) => userConfig.filter.height = parseInt(val));
menu.image.addHTML('<hr style="border-style: inset; border-color: dimgray">');

View File

@ -7,7 +7,8 @@
// demo/typescript/index.ts
import Human from "../../dist/human.esm.js";
var humanConfig = {
modelBasePath: "../../models"
modelBasePath: "../../models",
filter: { equalization: false }
};
var human = new Human(humanConfig);
human.env["perfadd"] = false;
@ -79,8 +80,8 @@ async function drawLoop() {
setTimeout(drawLoop, 30);
}
async function main() {
log("human version:", human.version, "tfjs version:", human.tf.version_core);
log("platform:", human.env.platform, "agent:", human.env.agent);
log("human version:", human.version, "| tfjs version:", human.tf.version_core);
log("platform:", human.env.platform, "| agent:", human.env.agent);
status("loading...");
await human.load();
log("backend:", human.tf.getBackend(), "| available:", human.env.backends);

File diff suppressed because one or more lines are too long

View File

@ -13,7 +13,7 @@ import Human from '../../dist/human.esm.js'; // equivalent of @vladmandic/human
const humanConfig = { // user configuration for human, used to fine-tune behavior
modelBasePath: '../../models',
filter: { equalization: true },
filter: { equalization: false },
// backend: 'webgpu',
// async: true,
// face: { enabled: false, detector: { rotation: true }, iris: { enabled: false }, description: { enabled: false }, emotion: { enabled: false } },
@ -99,8 +99,8 @@ async function drawLoop() { // main screen refresh loop
}
async function main() { // main entry point
log('human version:', human.version, 'tfjs version:', human.tf.version_core);
log('platform:', human.env.platform, 'agent:', human.env.agent);
log('human version:', human.version, '| tfjs version:', human.tf.version_core);
log('platform:', human.env.platform, '| agent:', human.env.agent);
status('loading...');
await human.load(); // preload all models
log('backend:', human.tf.getBackend(), '| available:', human.env.backends);

View File

@ -117,6 +117,7 @@ var config = {
warmup: "full",
cacheSensitivity: 0.7,
skipAllowed: false,
deallocate: false,
filter: {
enabled: true,
equalization: false,
@ -233,7 +234,9 @@ __export(tfjs_esm_exports, {
version: () => version9
});
__reExport(tfjs_esm_exports, dist_star);
__reExport(tfjs_esm_exports, dist_star2);
import * as dist_star from "@tensorflow/tfjs/dist/index.js";
import * as dist_star2 from "@tensorflow/tfjs-backend-webgl/dist/index.js";
import { Tensor } from "@tensorflow/tfjs/dist/index.js";
import { GraphModel } from "@tensorflow/tfjs-converter/dist/index";
var version = "3.11.0";
@ -907,8 +910,8 @@ function GLImageFilter() {
this.get = function() {
return filterChain;
};
this.apply = function(image25) {
resize(image25.width, image25.height);
this.apply = function(image24) {
resize(image24.width, image24.height);
drawCount = 0;
if (!sourceTexture)
sourceTexture = gl.createTexture();
@ -917,7 +920,7 @@ function GLImageFilter() {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image25);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image24);
for (let i = 0; i < filterChain.length; i++) {
lastInChain = i === filterChain.length - 1;
const f = filterChain[i];
@ -925,24 +928,27 @@ function GLImageFilter() {
}
return fxcanvas;
};
this.draw = function(image25) {
this.draw = function(image24) {
this.add("brightness", 0);
return this.apply(image25);
return this.apply(image24);
};
}
// src/image/enhance.ts
function histogramEqualization(input) {
const channels = tfjs_esm_exports.split(input, 3, 2);
async function histogramEqualization(inputImage) {
const squeeze9 = inputImage.shape.length === 4 ? tfjs_esm_exports.squeeze(inputImage) : inputImage;
const channels = tfjs_esm_exports.split(squeeze9, 3, 2);
const min2 = [tfjs_esm_exports.min(channels[0]), tfjs_esm_exports.min(channels[1]), tfjs_esm_exports.min(channels[2])];
const max4 = [tfjs_esm_exports.max(channels[0]), tfjs_esm_exports.max(channels[1]), tfjs_esm_exports.max(channels[2])];
const sub6 = [tfjs_esm_exports.sub(channels[0], min2[0]), tfjs_esm_exports.sub(channels[1], min2[1]), tfjs_esm_exports.sub(channels[2], min2[2])];
const absMax = await Promise.all(max4.map((channel) => channel.data()));
const maxValue = 0.99 * Math.max(absMax[0][0], absMax[1][0], absMax[2][0]);
const sub7 = [tfjs_esm_exports.sub(channels[0], min2[0]), tfjs_esm_exports.sub(channels[1], min2[1]), tfjs_esm_exports.sub(channels[2], min2[2])];
const range = [tfjs_esm_exports.sub(max4[0], min2[0]), tfjs_esm_exports.sub(max4[1], min2[1]), tfjs_esm_exports.sub(max4[2], min2[2])];
const fact = [tfjs_esm_exports.div(255, range[0]), tfjs_esm_exports.div(255, range[1]), tfjs_esm_exports.div(255, range[2])];
const enh = [tfjs_esm_exports.mul(sub6[0], fact[0]), tfjs_esm_exports.mul(sub6[1], fact[1]), tfjs_esm_exports.mul(sub6[2], fact[2])];
const fact = [tfjs_esm_exports.div(maxValue, range[0]), tfjs_esm_exports.div(maxValue, range[1]), tfjs_esm_exports.div(maxValue, range[2])];
const enh = [tfjs_esm_exports.mul(sub7[0], fact[0]), tfjs_esm_exports.mul(sub7[1], fact[1]), tfjs_esm_exports.mul(sub7[2], fact[2])];
const rgb2 = tfjs_esm_exports.stack([enh[0], enh[1], enh[2]], 2);
const reshape8 = tfjs_esm_exports.reshape(rgb2, [1, input.shape[0], input.shape[1], 3]);
tfjs_esm_exports.dispose([...channels, ...min2, ...max4, ...sub6, ...range, ...fact, ...enh, rgb2]);
const reshape8 = tfjs_esm_exports.reshape(rgb2, [1, squeeze9.shape[0], squeeze9.shape[1], 3]);
tfjs_esm_exports.dispose([...channels, ...min2, ...max4, ...sub7, ...range, ...fact, ...enh, rgb2, squeeze9]);
return reshape8;
}
@ -952,6 +958,12 @@ var inCanvas = null;
var outCanvas = null;
var tmpCanvas = null;
var fx;
var last = {
inputSum: 0,
cacheDiff: 1,
sumMethod: 0,
inputTensor: void 0
};
function canvas(width, height) {
let c;
if (env.browser) {
@ -978,7 +990,7 @@ function copy(input, output) {
ctx.drawImage(input, 0, 0);
return outputCanvas;
}
function process2(input, config3, getTensor = true) {
async function process2(input, config3, getTensor = true) {
if (!input) {
if (config3.debug)
log("input is missing");
@ -1028,7 +1040,7 @@ function process2(input, config3, getTensor = true) {
targetHeight = originalHeight * ((config3.filter.width || 0) / originalWidth);
if (!targetWidth || !targetHeight)
throw new Error("input cannot determine dimension");
if (!inCanvas || inCanvas.width !== targetWidth || inCanvas.height !== targetHeight)
if (!inCanvas || (inCanvas == null ? void 0 : inCanvas.width) !== targetWidth || (inCanvas == null ? void 0 : inCanvas.height) !== targetHeight)
inCanvas = canvas(targetWidth, targetHeight);
const inCtx = inCanvas.getContext("2d");
if (typeof ImageData !== "undefined" && input instanceof ImageData) {
@ -1037,13 +1049,13 @@ function process2(input, config3, getTensor = true) {
if (config3.filter.flip && typeof inCtx.translate !== "undefined") {
inCtx.translate(originalWidth, 0);
inCtx.scale(-1, 1);
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas == null ? void 0 : inCanvas.width, inCanvas == null ? void 0 : inCanvas.height);
inCtx.setTransform(1, 0, 0, 1, 0, 0);
} else {
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas == null ? void 0 : inCanvas.width, inCanvas == null ? void 0 : inCanvas.height);
}
}
if (!outCanvas || inCanvas.width !== outCanvas.width || inCanvas.height !== outCanvas.height)
if (!outCanvas || inCanvas.width !== outCanvas.width || (inCanvas == null ? void 0 : inCanvas.height) !== (outCanvas == null ? void 0 : outCanvas.height))
outCanvas = canvas(inCanvas.width, inCanvas.height);
if (config3.filter.enabled && env.webgl.supported) {
if (!fx)
@ -1131,51 +1143,31 @@ function process2(input, config3, getTensor = true) {
if (!pixels)
throw new Error("cannot create tensor from input");
const casted = tfjs_esm_exports.cast(pixels, "float32");
const tensor3 = config3.filter.equalization ? histogramEqualization(casted) : tfjs_esm_exports.expandDims(casted, 0);
const tensor3 = config3.filter.equalization ? await histogramEqualization(casted) : tfjs_esm_exports.expandDims(casted, 0);
tfjs_esm_exports.dispose([pixels, casted]);
return { tensor: tensor3, canvas: config3.filter.return ? outCanvas : null };
}
}
var lastInputSum = 0;
var lastCacheDiff = 1;
var benchmarked = 0;
var checksum = async (input) => {
const resizeFact = 48;
const reduced = tfjs_esm_exports.image.resizeBilinear(input, [Math.trunc((input.shape[1] || 1) / resizeFact), Math.trunc((input.shape[2] || 1) / resizeFact)]);
const tfSum = async () => {
const sumT = tfjs_esm_exports.sum(reduced);
const sum0 = await sumT.data();
tfjs_esm_exports.dispose(sumT);
return sum0[0];
};
const jsSum = async () => {
const reducedData = await reduced.data();
let sum0 = 0;
for (let i = 0; i < reducedData.length / 3; i++)
sum0 += reducedData[3 * i + 2];
return sum0;
};
if (benchmarked === 0) {
const t0 = now();
await jsSum();
const t1 = now();
await tfSum();
const t2 = now();
benchmarked = t1 - t0 < t2 - t1 ? 1 : 2;
}
const res = benchmarked === 1 ? await jsSum() : await tfSum();
tfjs_esm_exports.dispose(reduced);
return res;
};
async function skip(config3, input) {
let skipFrame = false;
if (config3.cacheSensitivity === 0)
return false;
const sum2 = await checksum(input);
const diff = 100 * (Math.max(sum2, lastInputSum) / Math.min(sum2, lastInputSum) - 1);
lastInputSum = sum2;
let skipFrame = diff < Math.max(config3.cacheSensitivity, lastCacheDiff);
lastCacheDiff = diff > 10 * config3.cacheSensitivity ? 0 : diff;
skipFrame = skipFrame && lastCacheDiff > 0;
return skipFrame;
if (!last.inputTensor) {
last.inputTensor = tfjs_esm_exports.clone(input);
} else if (last.inputTensor.shape[1] !== input.shape[1] || last.inputTensor.shape[2] !== input.shape[2]) {
tfjs_esm_exports.dispose(last.inputTensor);
last.inputTensor = tfjs_esm_exports.clone(input);
} else {
const t = {};
t.diff = tfjs_esm_exports.sub(input, last.inputTensor);
t.squared = tfjs_esm_exports.mul(t.diff, t.diff);
t.sum = tfjs_esm_exports.sum(t.squared);
const diffSum = await t.sum.data();
const diffRelative = diffSum[0] / (input.shape[1] || 1) / (input.shape[2] || 1) / 255 / 3;
tfjs_esm_exports.dispose([last.inputTensor, t.diff, t.squared, t.sum]);
last.inputTensor = tfjs_esm_exports.clone(input);
skipFrame = diffRelative <= config3.cacheSensitivity;
}
return skipFrame;
}
@ -1329,7 +1321,7 @@ async function load2(config3) {
log("cached model:", model2["modelUrl"]);
return model2;
}
async function predict(image25, config3, idx, count2) {
async function predict(image24, config3, idx, count2) {
var _a, _b;
if (!model2)
return null;
@ -1341,7 +1333,7 @@ async function predict(image25, config3, idx, count2) {
}
skipped2 = 0;
return new Promise(async (resolve) => {
const resize = tfjs_esm_exports.image.resizeBilinear(image25, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
const resize = tfjs_esm_exports.image.resizeBilinear(image24, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
const res = model2 == null ? void 0 : model2.execute(resize);
const num = (await res.data())[0];
cached[idx] = Math.round(100 * num) / 100;
@ -4655,10 +4647,10 @@ var scaleBoxCoordinates = (box4, factor) => {
const endPoint = [box4.endPoint[0] * factor[0], box4.endPoint[1] * factor[1]];
return { startPoint, endPoint, landmarks: box4.landmarks, confidence: box4.confidence };
};
var cutBoxFromImageAndResize = (box4, image25, cropSize) => {
const h = image25.shape[1];
const w = image25.shape[2];
const crop2 = tfjs_esm_exports.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
var cutBoxFromImageAndResize = (box4, image24, cropSize) => {
const h = image24.shape[1];
const w = image24.shape[2];
const crop2 = tfjs_esm_exports.image.cropAndResize(image24, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const norm = tfjs_esm_exports.div(crop2, 255);
tfjs_esm_exports.dispose(crop2);
return norm;
@ -5157,7 +5149,7 @@ var labels = [
// src/object/centernet.ts
var model4;
var inputSize3 = 0;
var last = [];
var last2 = [];
var lastTime3 = 0;
var skipped4 = Number.MAX_SAFE_INTEGER;
async function load4(config3) {
@ -5224,9 +5216,9 @@ async function process3(res, outputShape, config3) {
async function predict3(input, config3) {
const skipTime = (config3.object.skipTime || 0) > now() - lastTime3;
const skipFrame = skipped4 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last.length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && last2.length > 0) {
skipped4++;
return last;
return last2;
}
skipped4 = 0;
return new Promise(async (resolve) => {
@ -5236,7 +5228,7 @@ async function predict3(input, config3) {
lastTime3 = now();
tfjs_esm_exports.dispose(resize);
const obj = await process3(objectT, outputSize2, config3);
last = obj;
last2 = obj;
resolve(obj);
});
}
@ -5306,7 +5298,7 @@ function max2d(inputs, minScore) {
return [0, 0, newScore];
});
}
async function predict4(image25, config3) {
async function predict4(image24, config3) {
const skipTime = (config3.body.skipTime || 0) > now() - lastTime4;
const skipFrame = skipped5 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && Object.keys(cache2.keypoints).length > 0) {
@ -5319,7 +5311,7 @@ async function predict4(image25, config3) {
const tensor3 = tfjs_esm_exports.tidy(() => {
if (!(model5 == null ? void 0 : model5.inputs[0].shape))
return null;
const resize = tfjs_esm_exports.image.resizeBilinear(image25, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
const resize = tfjs_esm_exports.image.resizeBilinear(image24, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
const enhance3 = tfjs_esm_exports.mul(resize, 2);
const norm = enhance3.sub(1);
return norm;
@ -5331,10 +5323,10 @@ async function predict4(image25, config3) {
tfjs_esm_exports.dispose(tensor3);
if (resT) {
cache2.keypoints.length = 0;
const squeeze8 = resT.squeeze();
const squeeze9 = resT.squeeze();
tfjs_esm_exports.dispose(resT);
const stack4 = squeeze8.unstack(2);
tfjs_esm_exports.dispose(squeeze8);
const stack4 = squeeze9.unstack(2);
tfjs_esm_exports.dispose(squeeze9);
for (let id = 0; id < stack4.length; id++) {
const [x2, y2, partScore] = max2d(stack4[id], config3.body.minConfidence);
if (partScore > (((_a = config3.body) == null ? void 0 : _a.minConfidence) || 0)) {
@ -5346,8 +5338,8 @@ async function predict4(image25, config3) {
y2 / model5.inputs[0].shape[1]
],
position: [
Math.round(image25.shape[2] * x2 / model5.inputs[0].shape[2]),
Math.round(image25.shape[1] * y2 / model5.inputs[0].shape[1])
Math.round(image24.shape[2] * x2 / model5.inputs[0].shape[2]),
Math.round(image24.shape[1] * y2 / model5.inputs[0].shape[1])
]
});
}
@ -5388,7 +5380,7 @@ async function predict4(image25, config3) {
// src/gear/emotion.ts
var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"];
var model6;
var last2 = [];
var last3 = [];
var lastCount2 = 0;
var lastTime5 = 0;
var skipped6 = Number.MAX_SAFE_INTEGER;
@ -5407,15 +5399,15 @@ async function load6(config3) {
log("cached model:", model6["modelUrl"]);
return model6;
}
async function predict5(image25, config3, idx, count2) {
async function predict5(image24, config3, idx, count2) {
var _a, _b;
if (!model6)
return null;
const skipFrame = skipped6 < (((_a = config3.face.emotion) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime5;
if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last2[idx] && last2[idx].length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last3[idx] && last3[idx].length > 0) {
skipped6++;
return last2[idx];
return last3[idx];
}
skipped6 = 0;
return new Promise(async (resolve) => {
@ -5424,7 +5416,7 @@ async function predict5(image25, config3, idx, count2) {
if ((_a2 = config3.face.emotion) == null ? void 0 : _a2.enabled) {
const t = {};
const inputSize8 = (model6 == null ? void 0 : model6.inputs[0].shape) ? model6.inputs[0].shape[2] : 0;
t.resize = tfjs_esm_exports.image.resizeBilinear(image25, [inputSize8, inputSize8], false);
t.resize = tfjs_esm_exports.image.resizeBilinear(image24, [inputSize8, inputSize8], false);
[t.red, t.green, t.blue] = tfjs_esm_exports.split(t.resize, 3, 3);
t.redNorm = tfjs_esm_exports.mul(t.red, rgb[0]);
t.greenNorm = tfjs_esm_exports.mul(t.green, rgb[1]);
@ -5442,7 +5434,7 @@ async function predict5(image25, config3, idx, count2) {
obj.sort((a, b) => b.score - a.score);
Object.keys(t).forEach((tensor3) => tfjs_esm_exports.dispose(t[tensor3]));
}
last2[idx] = obj;
last3[idx] = obj;
lastCount2 = count2;
resolve(obj);
});
@ -5588,7 +5580,7 @@ var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0;
var enlargeFact = 1.6;
async function predict6(input, config3) {
var _a, _b, _c, _d, _e, _f, _g, _h;
var _a, _b, _c, _d, _e, _f, _g, _h, _i;
const skipTime = (((_a = config3.face.detector) == null ? void 0 : _a.skipTime) || 0) > now() - lastTime6;
const skipFrame = skipped7 < (((_b = config3.face.detector) == null ? void 0 : _b.skipFrames) || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || boxCache.length === 0) {
@ -5632,8 +5624,13 @@ async function predict6(input, config3) {
rotationMatrix = fixedRotationMatrix;
face5.tensor = cutBoxFromImageAndResize(box4, input, ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) ? [inputSize5, inputSize5] : [size(), size()]);
}
if ((_f = config3 == null ? void 0 : config3.filter) == null ? void 0 : _f.equalization) {
const equilized = await histogramEqualization(face5.tensor);
tfjs_esm_exports.dispose(face5.tensor);
face5.tensor = equilized;
}
face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!((_f = config3.face.mesh) == null ? void 0 : _f.enabled)) {
if (!((_g = config3.face.mesh) == null ? void 0 : _g.enabled)) {
face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input);
face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
@ -5655,10 +5652,10 @@ async function predict6(input, config3) {
const coordsReshaped = tfjs_esm_exports.reshape(contourCoords, [-1, 3]);
let rawCoords = await coordsReshaped.array();
tfjs_esm_exports.dispose([contourCoords, coordsReshaped, confidence, contours]);
if (face5.faceScore < (((_g = config3.face.detector) == null ? void 0 : _g.minConfidence) || 1)) {
if (face5.faceScore < (((_h = config3.face.detector) == null ? void 0 : _h.minConfidence) || 1)) {
box4.confidence = face5.faceScore;
} else {
if ((_h = config3.face.iris) == null ? void 0 : _h.enabled)
if ((_i = config3.face.iris) == null ? void 0 : _i.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
@ -5698,7 +5695,7 @@ var uvmap = UV468;
// src/face/faceres.ts
var model9;
var last3 = [];
var last4 = [];
var lastTime7 = 0;
var lastCount3 = 0;
var skipped8 = Number.MAX_SAFE_INTEGER;
@ -5726,15 +5723,15 @@ function enhance2(input) {
tfjs_esm_exports.dispose(crop2);
return norm;
}
async function predict7(image25, config3, idx, count2) {
async function predict7(image24, config3, idx, count2) {
var _a, _b, _c, _d;
if (!model9)
return null;
const skipFrame = skipped8 < (((_a = config3.face.description) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime7;
if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c = last3[idx]) == null ? void 0 : _c.age) && ((_d = last3[idx]) == null ? void 0 : _d.age) > 0) {
if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c = last4[idx]) == null ? void 0 : _c.age) && ((_d = last4[idx]) == null ? void 0 : _d.age) > 0) {
skipped8++;
return last3[idx];
return last4[idx];
}
skipped8 = 0;
return new Promise(async (resolve) => {
@ -5746,7 +5743,7 @@ async function predict7(image25, config3, idx, count2) {
descriptor: []
};
if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) {
const enhanced = enhance2(image25);
const enhanced = enhance2(image24);
const resT = model9 == null ? void 0 : model9.execute(enhanced);
lastTime7 = now();
tfjs_esm_exports.dispose(enhanced);
@ -5768,7 +5765,7 @@ async function predict7(image25, config3, idx, count2) {
obj.descriptor = Array.from(descriptor);
resT.forEach((t) => tfjs_esm_exports.dispose(t));
}
last3[idx] = obj;
last4[idx] = obj;
lastCount3 = count2;
resolve(obj);
});
@ -5787,16 +5784,16 @@ function getBoxCenter2(box4) {
box4.startPoint[1] + (box4.endPoint[1] - box4.startPoint[1]) / 2
];
}
function cutBoxFromImageAndResize2(box4, image25, cropSize) {
const h = image25.shape[1];
const w = image25.shape[2];
function cutBoxFromImageAndResize2(box4, image24, cropSize) {
const h = image24.shape[1];
const w = image24.shape[2];
const boxes = [[
box4.startPoint[1] / h,
box4.startPoint[0] / w,
box4.endPoint[1] / h,
box4.endPoint[0] / w
]];
return tfjs_esm_exports.image.cropAndResize(image25, boxes, [0], cropSize);
return tfjs_esm_exports.image.cropAndResize(image24, boxes, [0], cropSize);
}
function scaleBoxCoordinates2(box4, factor) {
const startPoint = [box4.startPoint[0] * factor[0], box4.startPoint[1] * factor[1]];
@ -8978,13 +8975,13 @@ var HandPipeline = class {
Math.trunc(coord[2])
]);
}
async estimateHands(image25, config3) {
async estimateHands(image24, config3) {
let useFreshBox = false;
let boxes;
const skipTime = (config3.hand.skipTime || 0) > now() - lastTime8;
const skipFrame = this.skipped < (config3.hand.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
boxes = await this.handDetector.predict(image25, config3);
boxes = await this.handDetector.predict(image24, config3);
this.skipped = 0;
}
if (config3.skipAllowed)
@ -9003,8 +9000,8 @@ var HandPipeline = class {
if (config3.hand.landmarks) {
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
const palmCenter = getBoxCenter2(currentBox);
const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]];
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tfjs_esm_exports.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone();
const palmCenterNormalized = [palmCenter[0] / image24.shape[2], palmCenter[1] / image24.shape[1]];
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tfjs_esm_exports.image.rotateWithOffset(image24, angle, 0, palmCenterNormalized) : image24.clone();
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@ -9942,7 +9939,7 @@ async function load11(config3) {
inputSize7 = 256;
return model10;
}
async function parseSinglePose(res, config3, image25, inputBox) {
async function parseSinglePose(res, config3, image24, inputBox) {
const kpt4 = res[0][0];
const keypoints = [];
let score = 0;
@ -9958,15 +9955,15 @@ async function parseSinglePose(res, config3, image25, inputBox) {
part: kpt3[id],
positionRaw,
position: [
Math.round((image25.shape[2] || 0) * positionRaw[0]),
Math.round((image25.shape[1] || 0) * positionRaw[1])
Math.round((image24.shape[2] || 0) * positionRaw[0]),
Math.round((image24.shape[1] || 0) * positionRaw[1])
]
});
}
}
score = keypoints.reduce((prev, curr) => curr.score > prev ? curr.score : prev, 0);
const bodies = [];
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
const annotations2 = {};
for (const [name, indexes] of Object.entries(connected3)) {
const pt = [];
@ -9983,7 +9980,7 @@ async function parseSinglePose(res, config3, image25, inputBox) {
bodies.push(body4);
return bodies;
}
async function parseMultiPose(res, config3, image25, inputBox) {
async function parseMultiPose(res, config3, image24, inputBox) {
const bodies = [];
for (let id = 0; id < res[0].length; id++) {
const kpt4 = res[0][id];
@ -10001,11 +9998,11 @@ async function parseMultiPose(res, config3, image25, inputBox) {
part: kpt3[i],
score: Math.round(100 * score) / 100,
positionRaw,
position: [Math.round((image25.shape[2] || 0) * positionRaw[0]), Math.round((image25.shape[1] || 0) * positionRaw[1])]
position: [Math.round((image24.shape[2] || 0) * positionRaw[0]), Math.round((image24.shape[1] || 0) * positionRaw[1])]
});
}
}
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
const annotations2 = {};
for (const [name, indexes] of Object.entries(connected3)) {
const pt = [];
@ -10057,7 +10054,7 @@ async function predict10(input, config3) {
// src/object/nanodet.ts
var model11;
var last4 = [];
var last5 = [];
var lastTime10 = 0;
var skipped11 = Number.MAX_SAFE_INTEGER;
var scaleBox = 2.5;
@ -10137,19 +10134,19 @@ async function process4(res, inputSize8, outputShape, config3) {
results = results.filter((_val, idx) => nmsIdx.includes(idx)).sort((a, b) => b.score - a.score);
return results;
}
async function predict11(image25, config3) {
async function predict11(image24, config3) {
const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
const skipFrame = skipped11 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last4.length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && last5.length > 0) {
skipped11++;
return last4;
return last5;
}
skipped11 = 0;
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last4;
return last5;
return new Promise(async (resolve) => {
const outputSize2 = [image25.shape[2], image25.shape[1]];
const resize = tfjs_esm_exports.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false);
const outputSize2 = [image24.shape[2], image24.shape[1]];
const resize = tfjs_esm_exports.image.resizeBilinear(image24, [model11.inputSize, model11.inputSize], false);
const norm = tfjs_esm_exports.div(resize, 255);
const transpose = norm.transpose([0, 3, 1, 2]);
tfjs_esm_exports.dispose(norm);
@ -10160,7 +10157,7 @@ async function predict11(image25, config3) {
lastTime10 = now();
tfjs_esm_exports.dispose(transpose);
const obj = await process4(objectT, model11.inputSize, outputSize2, config3);
last4 = obj;
last5 = obj;
resolve(obj);
});
}
@ -10526,7 +10523,7 @@ async function process5(input, background, config3) {
busy = true;
if (!model13)
await load14(config3);
const inputImage = process2(input, config3);
const inputImage = await process2(input, config3);
const width = ((_a = inputImage.canvas) == null ? void 0 : _a.width) || 0;
const height = ((_b = inputImage.canvas) == null ? void 0 : _b.height) || 0;
if (!inputImage.tensor)
@ -10577,7 +10574,7 @@ async function process5(input, background, config3) {
let mergedCanvas = null;
if (background && compositeCanvas) {
mergedCanvas = canvas(width, height);
const bgImage = process2(background, config3);
const bgImage = await process2(background, config3);
tfjs_esm_exports.dispose(bgImage.tensor);
const ctxMerge = mergedCanvas.getContext("2d");
ctxMerge.drawImage(bgImage.canvas, 0, 0, mergedCanvas.width, mergedCanvas.height);
@ -10819,6 +10816,7 @@ function registerCustomOps() {
kernelFunc: (op) => tfjs_esm_exports.tidy(() => tfjs_esm_exports.sub(op.inputs.a, tfjs_esm_exports.mul(tfjs_esm_exports.div(op.inputs.a, op.inputs.b), op.inputs.b)))
};
tfjs_esm_exports.registerKernel(kernelMod);
env.kernels.push("mod");
}
if (!env.kernels.includes("floormod")) {
const kernelMod = {
@ -10827,8 +10825,8 @@ function registerCustomOps() {
kernelFunc: (op) => tfjs_esm_exports.tidy(() => tfjs_esm_exports.floorDiv(op.inputs.a / op.inputs.b) * op.inputs.b + tfjs_esm_exports.mod(op.inputs.a, op.inputs.b))
};
tfjs_esm_exports.registerKernel(kernelMod);
env.kernels.push("floormod");
}
env.updateBackend();
}
async function check(instance, force = false) {
instance.state = "backend";
@ -10915,7 +10913,7 @@ async function check(instance, force = false) {
await tfjs_esm_exports.ready();
instance.performance.initBackend = Math.trunc(now() - timeStamp);
instance.config.backend = tfjs_esm_exports.getBackend();
env.updateBackend();
await env.updateBackend();
registerCustomOps();
}
return true;
@ -11611,8 +11609,9 @@ var body2 = (res) => {
gestures.push({ body: i, gesture: "raise right hand" });
const leftShoulder = res[i].keypoints.find((a) => a.part === "leftShoulder");
const rightShoulder = res[i].keypoints.find((a) => a.part === "rightShoulder");
if (leftShoulder && rightShoulder)
if (leftShoulder && rightShoulder && Math.abs(leftShoulder.positionRaw[1] - rightShoulder.positionRaw[1]) > 0.1) {
gestures.push({ body: i, gesture: `leaning ${leftShoulder.position[1] > rightShoulder.position[1] ? "left" : "right"}` });
}
}
return gestures;
};
@ -12949,7 +12948,7 @@ var Human = class {
await this.load();
timeStamp = now();
this.state = "image";
const img = process2(input, this.config);
const img = await process2(input, this.config);
this.process = img;
this.performance.inputProcess = this.env.perfadd ? (this.performance.inputProcess || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Get Image:");
@ -12969,7 +12968,7 @@ var Human = class {
this.performance.totalFrames++;
if (this.config.skipAllowed)
this.performance.cachedFrames++;
this.performance.inputCheck = this.env.perfadd ? (this.performance.inputCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.performance.cacheCheck = this.env.perfadd ? (this.performance.cacheCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Check Changed:");
let faceRes = [];
let bodyRes = [];

File diff suppressed because one or more lines are too long

153
dist/human.esm.js vendored
View File

@ -106,6 +106,7 @@ var config = {
warmup: "full",
cacheSensitivity: 0.7,
skipAllowed: false,
deallocate: false,
filter: {
enabled: true,
equalization: false,
@ -70569,7 +70570,7 @@ registerBackend("wasm", async () => {
const { wasm } = await init();
return new BackendWasm(wasm);
}, WASM_PRIORITY);
var externalVersion = "3.11.0-20211105";
var externalVersion = "3.11.0-20211106";
var version8 = {
tfjs: externalVersion,
"tfjs-core": externalVersion,
@ -71259,17 +71260,20 @@ function GLImageFilter() {
}
// src/image/enhance.ts
function histogramEqualization(input2) {
const channels = split(input2, 3, 2);
async function histogramEqualization(inputImage) {
const squeeze2 = inputImage.shape.length === 4 ? squeeze(inputImage) : inputImage;
const channels = split(squeeze2, 3, 2);
const min7 = [min(channels[0]), min(channels[1]), min(channels[2])];
const max7 = [max(channels[0]), max(channels[1]), max(channels[2])];
const absMax = await Promise.all(max7.map((channel) => channel.data()));
const maxValue = 0.99 * Math.max(absMax[0][0], absMax[1][0], absMax[2][0]);
const sub5 = [sub(channels[0], min7[0]), sub(channels[1], min7[1]), sub(channels[2], min7[2])];
const range7 = [sub(max7[0], min7[0]), sub(max7[1], min7[1]), sub(max7[2], min7[2])];
const fact = [div(255, range7[0]), div(255, range7[1]), div(255, range7[2])];
const fact = [div(maxValue, range7[0]), div(maxValue, range7[1]), div(maxValue, range7[2])];
const enh = [mul(sub5[0], fact[0]), mul(sub5[1], fact[1]), mul(sub5[2], fact[2])];
const rgb2 = stack([enh[0], enh[1], enh[2]], 2);
const reshape7 = reshape(rgb2, [1, input2.shape[0], input2.shape[1], 3]);
dispose([...channels, ...min7, ...max7, ...sub5, ...range7, ...fact, ...enh, rgb2]);
const reshape7 = reshape(rgb2, [1, squeeze2.shape[0], squeeze2.shape[1], 3]);
dispose([...channels, ...min7, ...max7, ...sub5, ...range7, ...fact, ...enh, rgb2, squeeze2]);
return reshape7;
}
@ -71279,6 +71283,12 @@ var inCanvas = null;
var outCanvas = null;
var tmpCanvas = null;
var fx;
var last = {
inputSum: 0,
cacheDiff: 1,
sumMethod: 0,
inputTensor: void 0
};
function canvas(width, height) {
let c;
if (env2.browser) {
@ -71305,7 +71315,7 @@ function copy(input2, output) {
ctx.drawImage(input2, 0, 0);
return outputCanvas;
}
function process2(input2, config3, getTensor2 = true) {
async function process2(input2, config3, getTensor2 = true) {
if (!input2) {
if (config3.debug)
log("input is missing");
@ -71355,7 +71365,7 @@ function process2(input2, config3, getTensor2 = true) {
targetHeight = originalHeight * ((config3.filter.width || 0) / originalWidth);
if (!targetWidth || !targetHeight)
throw new Error("input cannot determine dimension");
if (!inCanvas || inCanvas.width !== targetWidth || inCanvas.height !== targetHeight)
if (!inCanvas || (inCanvas == null ? void 0 : inCanvas.width) !== targetWidth || (inCanvas == null ? void 0 : inCanvas.height) !== targetHeight)
inCanvas = canvas(targetWidth, targetHeight);
const inCtx = inCanvas.getContext("2d");
if (typeof ImageData !== "undefined" && input2 instanceof ImageData) {
@ -71364,13 +71374,13 @@ function process2(input2, config3, getTensor2 = true) {
if (config3.filter.flip && typeof inCtx.translate !== "undefined") {
inCtx.translate(originalWidth, 0);
inCtx.scale(-1, 1);
inCtx.drawImage(input2, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
inCtx.drawImage(input2, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas == null ? void 0 : inCanvas.width, inCanvas == null ? void 0 : inCanvas.height);
inCtx.setTransform(1, 0, 0, 1, 0, 0);
} else {
inCtx.drawImage(input2, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
inCtx.drawImage(input2, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas == null ? void 0 : inCanvas.width, inCanvas == null ? void 0 : inCanvas.height);
}
}
if (!outCanvas || inCanvas.width !== outCanvas.width || inCanvas.height !== outCanvas.height)
if (!outCanvas || inCanvas.width !== outCanvas.width || (inCanvas == null ? void 0 : inCanvas.height) !== (outCanvas == null ? void 0 : outCanvas.height))
outCanvas = canvas(inCanvas.width, inCanvas.height);
if (config3.filter.enabled && env2.webgl.supported) {
if (!fx)
@ -71458,51 +71468,31 @@ function process2(input2, config3, getTensor2 = true) {
if (!pixels)
throw new Error("cannot create tensor from input");
const casted = cast(pixels, "float32");
const tensor2 = config3.filter.equalization ? histogramEqualization(casted) : expandDims(casted, 0);
const tensor2 = config3.filter.equalization ? await histogramEqualization(casted) : expandDims(casted, 0);
dispose([pixels, casted]);
return { tensor: tensor2, canvas: config3.filter.return ? outCanvas : null };
}
}
var lastInputSum = 0;
var lastCacheDiff = 1;
var benchmarked = 0;
var checksum = async (input2) => {
const resizeFact = 48;
const reduced = image.resizeBilinear(input2, [Math.trunc((input2.shape[1] || 1) / resizeFact), Math.trunc((input2.shape[2] || 1) / resizeFact)]);
const tfSum = async () => {
const sumT = sum2(reduced);
const sum0 = await sumT.data();
dispose(sumT);
return sum0[0];
};
const jsSum = async () => {
const reducedData = await reduced.data();
let sum0 = 0;
for (let i = 0; i < reducedData.length / 3; i++)
sum0 += reducedData[3 * i + 2];
return sum0;
};
if (benchmarked === 0) {
const t0 = now();
await jsSum();
const t1 = now();
await tfSum();
const t2 = now();
benchmarked = t1 - t0 < t2 - t1 ? 1 : 2;
}
const res = benchmarked === 1 ? await jsSum() : await tfSum();
dispose(reduced);
return res;
};
async function skip(config3, input2) {
let skipFrame = false;
if (config3.cacheSensitivity === 0)
return false;
const sum7 = await checksum(input2);
const diff = 100 * (Math.max(sum7, lastInputSum) / Math.min(sum7, lastInputSum) - 1);
lastInputSum = sum7;
let skipFrame = diff < Math.max(config3.cacheSensitivity, lastCacheDiff);
lastCacheDiff = diff > 10 * config3.cacheSensitivity ? 0 : diff;
skipFrame = skipFrame && lastCacheDiff > 0;
return skipFrame;
if (!last.inputTensor) {
last.inputTensor = clone(input2);
} else if (last.inputTensor.shape[1] !== input2.shape[1] || last.inputTensor.shape[2] !== input2.shape[2]) {
dispose(last.inputTensor);
last.inputTensor = clone(input2);
} else {
const t = {};
t.diff = sub(input2, last.inputTensor);
t.squared = mul(t.diff, t.diff);
t.sum = sum2(t.squared);
const diffSum = await t.sum.data();
const diffRelative = diffSum[0] / (input2.shape[1] || 1) / (input2.shape[2] || 1) / 255 / 3;
dispose([last.inputTensor, t.diff, t.squared, t.sum]);
last.inputTensor = clone(input2);
skipFrame = diffRelative <= config3.cacheSensitivity;
}
return skipFrame;
}
@ -75484,7 +75474,7 @@ var labels = [
// src/object/centernet.ts
var model5;
var inputSize3 = 0;
var last = [];
var last2 = [];
var lastTime3 = 0;
var skipped4 = Number.MAX_SAFE_INTEGER;
async function load4(config3) {
@ -75551,9 +75541,9 @@ async function process3(res, outputShape, config3) {
async function predict3(input2, config3) {
const skipTime = (config3.object.skipTime || 0) > now() - lastTime3;
const skipFrame = skipped4 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last.length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && last2.length > 0) {
skipped4++;
return last;
return last2;
}
skipped4 = 0;
return new Promise(async (resolve) => {
@ -75563,7 +75553,7 @@ async function predict3(input2, config3) {
lastTime3 = now();
dispose(resize);
const obj = await process3(objectT, outputSize2, config3);
last = obj;
last2 = obj;
resolve(obj);
});
}
@ -75715,7 +75705,7 @@ async function predict4(image7, config3) {
// src/gear/emotion.ts
var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"];
var model7;
var last2 = [];
var last3 = [];
var lastCount2 = 0;
var lastTime5 = 0;
var skipped6 = Number.MAX_SAFE_INTEGER;
@ -75740,9 +75730,9 @@ async function predict5(image7, config3, idx, count3) {
return null;
const skipFrame = skipped6 < (((_a = config3.face.emotion) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime5;
if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count3 && last2[idx] && last2[idx].length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count3 && last3[idx] && last3[idx].length > 0) {
skipped6++;
return last2[idx];
return last3[idx];
}
skipped6 = 0;
return new Promise(async (resolve) => {
@ -75769,7 +75759,7 @@ async function predict5(image7, config3, idx, count3) {
obj.sort((a, b) => b.score - a.score);
Object.keys(t).forEach((tensor2) => dispose(t[tensor2]));
}
last2[idx] = obj;
last3[idx] = obj;
lastCount2 = count3;
resolve(obj);
});
@ -75915,7 +75905,7 @@ var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0;
var enlargeFact = 1.6;
async function predict6(input2, config3) {
var _a, _b, _c, _d, _e, _f, _g, _h;
var _a, _b, _c, _d, _e, _f, _g, _h, _i;
const skipTime = (((_a = config3.face.detector) == null ? void 0 : _a.skipTime) || 0) > now() - lastTime6;
const skipFrame = skipped7 < (((_b = config3.face.detector) == null ? void 0 : _b.skipFrames) || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || boxCache.length === 0) {
@ -75959,8 +75949,13 @@ async function predict6(input2, config3) {
rotationMatrix = fixedRotationMatrix;
face5.tensor = cutBoxFromImageAndResize(box4, input2, ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) ? [inputSize5, inputSize5] : [size(), size()]);
}
if ((_f = config3 == null ? void 0 : config3.filter) == null ? void 0 : _f.equalization) {
const equilized = await histogramEqualization(face5.tensor);
dispose(face5.tensor);
face5.tensor = equilized;
}
face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!((_f = config3.face.mesh) == null ? void 0 : _f.enabled)) {
if (!((_g = config3.face.mesh) == null ? void 0 : _g.enabled)) {
face5.box = getClampedBox(box4, input2);
face5.boxRaw = getRawBox(box4, input2);
face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
@ -75982,10 +75977,10 @@ async function predict6(input2, config3) {
const coordsReshaped = reshape(contourCoords, [-1, 3]);
let rawCoords = await coordsReshaped.array();
dispose([contourCoords, coordsReshaped, confidence, contours]);
if (face5.faceScore < (((_g = config3.face.detector) == null ? void 0 : _g.minConfidence) || 1)) {
if (face5.faceScore < (((_h = config3.face.detector) == null ? void 0 : _h.minConfidence) || 1)) {
box4.confidence = face5.faceScore;
} else {
if ((_h = config3.face.iris) == null ? void 0 : _h.enabled)
if ((_i = config3.face.iris) == null ? void 0 : _i.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input2.shape[2] || 0), pt[1] / (input2.shape[1] || 0), (pt[2] || 0) / inputSize5]);
@ -76025,7 +76020,7 @@ var uvmap = UV468;
// src/face/faceres.ts
var model10;
var last3 = [];
var last4 = [];
var lastTime7 = 0;
var lastCount3 = 0;
var skipped8 = Number.MAX_SAFE_INTEGER;
@ -76059,9 +76054,9 @@ async function predict7(image7, config3, idx, count3) {
return null;
const skipFrame = skipped8 < (((_a = config3.face.description) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime7;
if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count3 && ((_c = last3[idx]) == null ? void 0 : _c.age) && ((_d = last3[idx]) == null ? void 0 : _d.age) > 0) {
if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count3 && ((_c = last4[idx]) == null ? void 0 : _c.age) && ((_d = last4[idx]) == null ? void 0 : _d.age) > 0) {
skipped8++;
return last3[idx];
return last4[idx];
}
skipped8 = 0;
return new Promise(async (resolve) => {
@ -76095,7 +76090,7 @@ async function predict7(image7, config3, idx, count3) {
obj.descriptor = Array.from(descriptor);
resT.forEach((t) => dispose(t));
}
last3[idx] = obj;
last4[idx] = obj;
lastCount3 = count3;
resolve(obj);
});
@ -80384,7 +80379,7 @@ async function predict10(input2, config3) {
// src/object/nanodet.ts
var model12;
var last4 = [];
var last5 = [];
var lastTime10 = 0;
var skipped11 = Number.MAX_SAFE_INTEGER;
var scaleBox = 2.5;
@ -80467,13 +80462,13 @@ async function process4(res, inputSize8, outputShape, config3) {
async function predict11(image7, config3) {
const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
const skipFrame = skipped11 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last4.length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && last5.length > 0) {
skipped11++;
return last4;
return last5;
}
skipped11 = 0;
if (!env2.kernels.includes("mod") || !env2.kernels.includes("sparsetodense"))
return last4;
return last5;
return new Promise(async (resolve) => {
const outputSize2 = [image7.shape[2], image7.shape[1]];
const resize = image.resizeBilinear(image7, [model12.inputSize, model12.inputSize], false);
@ -80487,7 +80482,7 @@ async function predict11(image7, config3) {
lastTime10 = now();
dispose(transpose6);
const obj = await process4(objectT, model12.inputSize, outputSize2, config3);
last4 = obj;
last5 = obj;
resolve(obj);
});
}
@ -80853,7 +80848,7 @@ async function process5(input2, background, config3) {
busy = true;
if (!model14)
await load14(config3);
const inputImage = process2(input2, config3);
const inputImage = await process2(input2, config3);
const width = ((_a = inputImage.canvas) == null ? void 0 : _a.width) || 0;
const height = ((_b = inputImage.canvas) == null ? void 0 : _b.height) || 0;
if (!inputImage.tensor)
@ -80904,7 +80899,7 @@ async function process5(input2, background, config3) {
let mergedCanvas = null;
if (background && compositeCanvas) {
mergedCanvas = canvas(width, height);
const bgImage = process2(background, config3);
const bgImage = await process2(background, config3);
dispose(bgImage.tensor);
const ctxMerge = mergedCanvas.getContext("2d");
ctxMerge.drawImage(bgImage.canvas, 0, 0, mergedCanvas.width, mergedCanvas.height);
@ -81146,6 +81141,7 @@ function registerCustomOps() {
kernelFunc: (op2) => tidy(() => sub(op2.inputs.a, mul(div(op2.inputs.a, op2.inputs.b), op2.inputs.b)))
};
registerKernel(kernelMod);
env2.kernels.push("mod");
}
if (!env2.kernels.includes("floormod")) {
const kernelMod = {
@ -81154,8 +81150,8 @@ function registerCustomOps() {
kernelFunc: (op2) => tidy(() => floorDiv(op2.inputs.a / op2.inputs.b) * op2.inputs.b + mod(op2.inputs.a, op2.inputs.b))
};
registerKernel(kernelMod);
env2.kernels.push("floormod");
}
env2.updateBackend();
}
async function check(instance, force = false) {
instance.state = "backend";
@ -81242,7 +81238,7 @@ async function check(instance, force = false) {
await ready();
instance.performance.initBackend = Math.trunc(now() - timeStamp);
instance.config.backend = getBackend();
env2.updateBackend();
await env2.updateBackend();
registerCustomOps();
}
return true;
@ -81938,8 +81934,9 @@ var body2 = (res) => {
gestures.push({ body: i, gesture: "raise right hand" });
const leftShoulder = res[i].keypoints.find((a) => a.part === "leftShoulder");
const rightShoulder = res[i].keypoints.find((a) => a.part === "rightShoulder");
if (leftShoulder && rightShoulder)
if (leftShoulder && rightShoulder && Math.abs(leftShoulder.positionRaw[1] - rightShoulder.positionRaw[1]) > 0.1) {
gestures.push({ body: i, gesture: `leaning ${leftShoulder.position[1] > rightShoulder.position[1] ? "left" : "right"}` });
}
}
return gestures;
};
@ -83276,7 +83273,7 @@ var Human = class {
await this.load();
timeStamp = now();
this.state = "image";
const img = process2(input2, this.config);
const img = await process2(input2, this.config);
this.process = img;
this.performance.inputProcess = this.env.perfadd ? (this.performance.inputProcess || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Get Image:");
@ -83296,7 +83293,7 @@ var Human = class {
this.performance.totalFrames++;
if (this.config.skipAllowed)
this.performance.cachedFrames++;
this.performance.inputCheck = this.env.perfadd ? (this.performance.inputCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.performance.cacheCheck = this.env.perfadd ? (this.performance.cacheCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Check Changed:");
let faceRes = [];
let bodyRes = [];

File diff suppressed because one or more lines are too long

79900
dist/human.js vendored

File diff suppressed because one or more lines are too long

235
dist/human.node-gpu.js vendored
View File

@ -152,6 +152,7 @@ var config = {
warmup: "full",
cacheSensitivity: 0.7,
skipAllowed: false,
deallocate: false,
filter: {
enabled: true,
equalization: false,
@ -918,8 +919,8 @@ function GLImageFilter() {
this.get = function() {
return filterChain;
};
this.apply = function(image25) {
resize(image25.width, image25.height);
this.apply = function(image24) {
resize(image24.width, image24.height);
drawCount = 0;
if (!sourceTexture)
sourceTexture = gl.createTexture();
@ -928,7 +929,7 @@ function GLImageFilter() {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image25);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image24);
for (let i = 0; i < filterChain.length; i++) {
lastInChain = i === filterChain.length - 1;
const f = filterChain[i];
@ -936,25 +937,28 @@ function GLImageFilter() {
}
return fxcanvas;
};
this.draw = function(image25) {
this.draw = function(image24) {
this.add("brightness", 0);
return this.apply(image25);
return this.apply(image24);
};
}
// src/image/enhance.ts
var tf = __toModule(require_tfjs_esm());
function histogramEqualization(input) {
const channels = tf.split(input, 3, 2);
async function histogramEqualization(inputImage) {
const squeeze9 = inputImage.shape.length === 4 ? tf.squeeze(inputImage) : inputImage;
const channels = tf.split(squeeze9, 3, 2);
const min2 = [tf.min(channels[0]), tf.min(channels[1]), tf.min(channels[2])];
const max4 = [tf.max(channels[0]), tf.max(channels[1]), tf.max(channels[2])];
const sub6 = [tf.sub(channels[0], min2[0]), tf.sub(channels[1], min2[1]), tf.sub(channels[2], min2[2])];
const absMax = await Promise.all(max4.map((channel) => channel.data()));
const maxValue = 0.99 * Math.max(absMax[0][0], absMax[1][0], absMax[2][0]);
const sub7 = [tf.sub(channels[0], min2[0]), tf.sub(channels[1], min2[1]), tf.sub(channels[2], min2[2])];
const range = [tf.sub(max4[0], min2[0]), tf.sub(max4[1], min2[1]), tf.sub(max4[2], min2[2])];
const fact = [tf.div(255, range[0]), tf.div(255, range[1]), tf.div(255, range[2])];
const enh = [tf.mul(sub6[0], fact[0]), tf.mul(sub6[1], fact[1]), tf.mul(sub6[2], fact[2])];
const fact = [tf.div(maxValue, range[0]), tf.div(maxValue, range[1]), tf.div(maxValue, range[2])];
const enh = [tf.mul(sub7[0], fact[0]), tf.mul(sub7[1], fact[1]), tf.mul(sub7[2], fact[2])];
const rgb2 = tf.stack([enh[0], enh[1], enh[2]], 2);
const reshape8 = tf.reshape(rgb2, [1, input.shape[0], input.shape[1], 3]);
tf.dispose([...channels, ...min2, ...max4, ...sub6, ...range, ...fact, ...enh, rgb2]);
const reshape8 = tf.reshape(rgb2, [1, squeeze9.shape[0], squeeze9.shape[1], 3]);
tf.dispose([...channels, ...min2, ...max4, ...sub7, ...range, ...fact, ...enh, rgb2, squeeze9]);
return reshape8;
}
@ -964,6 +968,12 @@ var inCanvas = null;
var outCanvas = null;
var tmpCanvas = null;
var fx;
var last = {
inputSum: 0,
cacheDiff: 1,
sumMethod: 0,
inputTensor: void 0
};
function canvas(width, height) {
let c;
if (env.browser) {
@ -990,7 +1000,7 @@ function copy(input, output) {
ctx.drawImage(input, 0, 0);
return outputCanvas;
}
function process2(input, config3, getTensor = true) {
async function process2(input, config3, getTensor = true) {
if (!input) {
if (config3.debug)
log("input is missing");
@ -1040,7 +1050,7 @@ function process2(input, config3, getTensor = true) {
targetHeight = originalHeight * ((config3.filter.width || 0) / originalWidth);
if (!targetWidth || !targetHeight)
throw new Error("input cannot determine dimension");
if (!inCanvas || inCanvas.width !== targetWidth || inCanvas.height !== targetHeight)
if (!inCanvas || (inCanvas == null ? void 0 : inCanvas.width) !== targetWidth || (inCanvas == null ? void 0 : inCanvas.height) !== targetHeight)
inCanvas = canvas(targetWidth, targetHeight);
const inCtx = inCanvas.getContext("2d");
if (typeof ImageData !== "undefined" && input instanceof ImageData) {
@ -1049,13 +1059,13 @@ function process2(input, config3, getTensor = true) {
if (config3.filter.flip && typeof inCtx.translate !== "undefined") {
inCtx.translate(originalWidth, 0);
inCtx.scale(-1, 1);
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas == null ? void 0 : inCanvas.width, inCanvas == null ? void 0 : inCanvas.height);
inCtx.setTransform(1, 0, 0, 1, 0, 0);
} else {
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas == null ? void 0 : inCanvas.width, inCanvas == null ? void 0 : inCanvas.height);
}
}
if (!outCanvas || inCanvas.width !== outCanvas.width || inCanvas.height !== outCanvas.height)
if (!outCanvas || inCanvas.width !== outCanvas.width || (inCanvas == null ? void 0 : inCanvas.height) !== (outCanvas == null ? void 0 : outCanvas.height))
outCanvas = canvas(inCanvas.width, inCanvas.height);
if (config3.filter.enabled && env.webgl.supported) {
if (!fx)
@ -1143,51 +1153,31 @@ function process2(input, config3, getTensor = true) {
if (!pixels)
throw new Error("cannot create tensor from input");
const casted = tf2.cast(pixels, "float32");
const tensor3 = config3.filter.equalization ? histogramEqualization(casted) : tf2.expandDims(casted, 0);
const tensor3 = config3.filter.equalization ? await histogramEqualization(casted) : tf2.expandDims(casted, 0);
tf2.dispose([pixels, casted]);
return { tensor: tensor3, canvas: config3.filter.return ? outCanvas : null };
}
}
var lastInputSum = 0;
var lastCacheDiff = 1;
var benchmarked = 0;
var checksum = async (input) => {
const resizeFact = 48;
const reduced = tf2.image.resizeBilinear(input, [Math.trunc((input.shape[1] || 1) / resizeFact), Math.trunc((input.shape[2] || 1) / resizeFact)]);
const tfSum = async () => {
const sumT = tf2.sum(reduced);
const sum0 = await sumT.data();
tf2.dispose(sumT);
return sum0[0];
};
const jsSum = async () => {
const reducedData = await reduced.data();
let sum0 = 0;
for (let i = 0; i < reducedData.length / 3; i++)
sum0 += reducedData[3 * i + 2];
return sum0;
};
if (benchmarked === 0) {
const t0 = now();
await jsSum();
const t1 = now();
await tfSum();
const t2 = now();
benchmarked = t1 - t0 < t2 - t1 ? 1 : 2;
}
const res = benchmarked === 1 ? await jsSum() : await tfSum();
tf2.dispose(reduced);
return res;
};
async function skip(config3, input) {
let skipFrame = false;
if (config3.cacheSensitivity === 0)
return false;
const sum2 = await checksum(input);
const diff = 100 * (Math.max(sum2, lastInputSum) / Math.min(sum2, lastInputSum) - 1);
lastInputSum = sum2;
let skipFrame = diff < Math.max(config3.cacheSensitivity, lastCacheDiff);
lastCacheDiff = diff > 10 * config3.cacheSensitivity ? 0 : diff;
skipFrame = skipFrame && lastCacheDiff > 0;
return skipFrame;
if (!last.inputTensor) {
last.inputTensor = tf2.clone(input);
} else if (last.inputTensor.shape[1] !== input.shape[1] || last.inputTensor.shape[2] !== input.shape[2]) {
tf2.dispose(last.inputTensor);
last.inputTensor = tf2.clone(input);
} else {
const t = {};
t.diff = tf2.sub(input, last.inputTensor);
t.squared = tf2.mul(t.diff, t.diff);
t.sum = tf2.sum(t.squared);
const diffSum = await t.sum.data();
const diffRelative = diffSum[0] / (input.shape[1] || 1) / (input.shape[2] || 1) / 255 / 3;
tf2.dispose([last.inputTensor, t.diff, t.squared, t.sum]);
last.inputTensor = tf2.clone(input);
skipFrame = diffRelative <= config3.cacheSensitivity;
}
return skipFrame;
}
@ -1349,7 +1339,7 @@ async function load2(config3) {
log("cached model:", model2["modelUrl"]);
return model2;
}
async function predict(image25, config3, idx, count2) {
async function predict(image24, config3, idx, count2) {
var _a, _b;
if (!model2)
return null;
@ -1361,7 +1351,7 @@ async function predict(image25, config3, idx, count2) {
}
skipped2 = 0;
return new Promise(async (resolve) => {
const resize = tf5.image.resizeBilinear(image25, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
const resize = tf5.image.resizeBilinear(image24, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
const res = model2 == null ? void 0 : model2.execute(resize);
const num = (await res.data())[0];
cached[idx] = Math.round(100 * num) / 100;
@ -4681,10 +4671,10 @@ var scaleBoxCoordinates = (box4, factor) => {
const endPoint = [box4.endPoint[0] * factor[0], box4.endPoint[1] * factor[1]];
return { startPoint, endPoint, landmarks: box4.landmarks, confidence: box4.confidence };
};
var cutBoxFromImageAndResize = (box4, image25, cropSize) => {
const h = image25.shape[1];
const w = image25.shape[2];
const crop2 = tf6.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
var cutBoxFromImageAndResize = (box4, image24, cropSize) => {
const h = image24.shape[1];
const w = image24.shape[2];
const crop2 = tf6.image.cropAndResize(image24, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const norm = tf6.div(crop2, 255);
tf6.dispose(crop2);
return norm;
@ -5189,7 +5179,7 @@ var labels = [
// src/object/centernet.ts
var model4;
var inputSize3 = 0;
var last = [];
var last2 = [];
var lastTime3 = 0;
var skipped4 = Number.MAX_SAFE_INTEGER;
async function load4(config3) {
@ -5256,9 +5246,9 @@ async function process3(res, outputShape, config3) {
async function predict3(input, config3) {
const skipTime = (config3.object.skipTime || 0) > now() - lastTime3;
const skipFrame = skipped4 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last.length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && last2.length > 0) {
skipped4++;
return last;
return last2;
}
skipped4 = 0;
return new Promise(async (resolve) => {
@ -5268,7 +5258,7 @@ async function predict3(input, config3) {
lastTime3 = now();
tf9.dispose(resize);
const obj = await process3(objectT, outputSize2, config3);
last = obj;
last2 = obj;
resolve(obj);
});
}
@ -5341,7 +5331,7 @@ function max2d(inputs, minScore) {
return [0, 0, newScore];
});
}
async function predict4(image25, config3) {
async function predict4(image24, config3) {
const skipTime = (config3.body.skipTime || 0) > now() - lastTime4;
const skipFrame = skipped5 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && Object.keys(cache2.keypoints).length > 0) {
@ -5354,7 +5344,7 @@ async function predict4(image25, config3) {
const tensor3 = tf10.tidy(() => {
if (!(model5 == null ? void 0 : model5.inputs[0].shape))
return null;
const resize = tf10.image.resizeBilinear(image25, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
const resize = tf10.image.resizeBilinear(image24, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
const enhance3 = tf10.mul(resize, 2);
const norm = enhance3.sub(1);
return norm;
@ -5366,10 +5356,10 @@ async function predict4(image25, config3) {
tf10.dispose(tensor3);
if (resT) {
cache2.keypoints.length = 0;
const squeeze8 = resT.squeeze();
const squeeze9 = resT.squeeze();
tf10.dispose(resT);
const stack4 = squeeze8.unstack(2);
tf10.dispose(squeeze8);
const stack4 = squeeze9.unstack(2);
tf10.dispose(squeeze9);
for (let id = 0; id < stack4.length; id++) {
const [x2, y2, partScore] = max2d(stack4[id], config3.body.minConfidence);
if (partScore > (((_a = config3.body) == null ? void 0 : _a.minConfidence) || 0)) {
@ -5381,8 +5371,8 @@ async function predict4(image25, config3) {
y2 / model5.inputs[0].shape[1]
],
position: [
Math.round(image25.shape[2] * x2 / model5.inputs[0].shape[2]),
Math.round(image25.shape[1] * y2 / model5.inputs[0].shape[1])
Math.round(image24.shape[2] * x2 / model5.inputs[0].shape[2]),
Math.round(image24.shape[1] * y2 / model5.inputs[0].shape[1])
]
});
}
@ -5424,7 +5414,7 @@ async function predict4(image25, config3) {
var tf11 = __toModule(require_tfjs_esm());
var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"];
var model6;
var last2 = [];
var last3 = [];
var lastCount2 = 0;
var lastTime5 = 0;
var skipped6 = Number.MAX_SAFE_INTEGER;
@ -5443,15 +5433,15 @@ async function load6(config3) {
log("cached model:", model6["modelUrl"]);
return model6;
}
async function predict5(image25, config3, idx, count2) {
async function predict5(image24, config3, idx, count2) {
var _a, _b;
if (!model6)
return null;
const skipFrame = skipped6 < (((_a = config3.face.emotion) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime5;
if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last2[idx] && last2[idx].length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last3[idx] && last3[idx].length > 0) {
skipped6++;
return last2[idx];
return last3[idx];
}
skipped6 = 0;
return new Promise(async (resolve) => {
@ -5460,7 +5450,7 @@ async function predict5(image25, config3, idx, count2) {
if ((_a2 = config3.face.emotion) == null ? void 0 : _a2.enabled) {
const t = {};
const inputSize8 = (model6 == null ? void 0 : model6.inputs[0].shape) ? model6.inputs[0].shape[2] : 0;
t.resize = tf11.image.resizeBilinear(image25, [inputSize8, inputSize8], false);
t.resize = tf11.image.resizeBilinear(image24, [inputSize8, inputSize8], false);
[t.red, t.green, t.blue] = tf11.split(t.resize, 3, 3);
t.redNorm = tf11.mul(t.red, rgb[0]);
t.greenNorm = tf11.mul(t.green, rgb[1]);
@ -5478,7 +5468,7 @@ async function predict5(image25, config3, idx, count2) {
obj.sort((a, b) => b.score - a.score);
Object.keys(t).forEach((tensor3) => tf11.dispose(t[tensor3]));
}
last2[idx] = obj;
last3[idx] = obj;
lastCount2 = count2;
resolve(obj);
});
@ -5628,7 +5618,7 @@ var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0;
var enlargeFact = 1.6;
async function predict6(input, config3) {
var _a, _b, _c, _d, _e, _f, _g, _h;
var _a, _b, _c, _d, _e, _f, _g, _h, _i;
const skipTime = (((_a = config3.face.detector) == null ? void 0 : _a.skipTime) || 0) > now() - lastTime6;
const skipFrame = skipped7 < (((_b = config3.face.detector) == null ? void 0 : _b.skipFrames) || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || boxCache.length === 0) {
@ -5672,8 +5662,13 @@ async function predict6(input, config3) {
rotationMatrix = fixedRotationMatrix;
face5.tensor = cutBoxFromImageAndResize(box4, input, ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) ? [inputSize5, inputSize5] : [size(), size()]);
}
if ((_f = config3 == null ? void 0 : config3.filter) == null ? void 0 : _f.equalization) {
const equilized = await histogramEqualization(face5.tensor);
tf13.dispose(face5.tensor);
face5.tensor = equilized;
}
face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!((_f = config3.face.mesh) == null ? void 0 : _f.enabled)) {
if (!((_g = config3.face.mesh) == null ? void 0 : _g.enabled)) {
face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input);
face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
@ -5695,10 +5690,10 @@ async function predict6(input, config3) {
const coordsReshaped = tf13.reshape(contourCoords, [-1, 3]);
let rawCoords = await coordsReshaped.array();
tf13.dispose([contourCoords, coordsReshaped, confidence, contours]);
if (face5.faceScore < (((_g = config3.face.detector) == null ? void 0 : _g.minConfidence) || 1)) {
if (face5.faceScore < (((_h = config3.face.detector) == null ? void 0 : _h.minConfidence) || 1)) {
box4.confidence = face5.faceScore;
} else {
if ((_h = config3.face.iris) == null ? void 0 : _h.enabled)
if ((_i = config3.face.iris) == null ? void 0 : _i.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
@ -5739,7 +5734,7 @@ var uvmap = UV468;
// src/face/faceres.ts
var tf14 = __toModule(require_tfjs_esm());
var model9;
var last3 = [];
var last4 = [];
var lastTime7 = 0;
var lastCount3 = 0;
var skipped8 = Number.MAX_SAFE_INTEGER;
@ -5767,15 +5762,15 @@ function enhance2(input) {
tf14.dispose(crop2);
return norm;
}
async function predict7(image25, config3, idx, count2) {
async function predict7(image24, config3, idx, count2) {
var _a, _b, _c, _d;
if (!model9)
return null;
const skipFrame = skipped8 < (((_a = config3.face.description) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime7;
if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c = last3[idx]) == null ? void 0 : _c.age) && ((_d = last3[idx]) == null ? void 0 : _d.age) > 0) {
if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c = last4[idx]) == null ? void 0 : _c.age) && ((_d = last4[idx]) == null ? void 0 : _d.age) > 0) {
skipped8++;
return last3[idx];
return last4[idx];
}
skipped8 = 0;
return new Promise(async (resolve) => {
@ -5787,7 +5782,7 @@ async function predict7(image25, config3, idx, count2) {
descriptor: []
};
if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) {
const enhanced = enhance2(image25);
const enhanced = enhance2(image24);
const resT = model9 == null ? void 0 : model9.execute(enhanced);
lastTime7 = now();
tf14.dispose(enhanced);
@ -5809,7 +5804,7 @@ async function predict7(image25, config3, idx, count2) {
obj.descriptor = Array.from(descriptor);
resT.forEach((t) => tf14.dispose(t));
}
last3[idx] = obj;
last4[idx] = obj;
lastCount3 = count2;
resolve(obj);
});
@ -5835,16 +5830,16 @@ function getBoxCenter2(box4) {
box4.startPoint[1] + (box4.endPoint[1] - box4.startPoint[1]) / 2
];
}
function cutBoxFromImageAndResize2(box4, image25, cropSize) {
const h = image25.shape[1];
const w = image25.shape[2];
function cutBoxFromImageAndResize2(box4, image24, cropSize) {
const h = image24.shape[1];
const w = image24.shape[2];
const boxes = [[
box4.startPoint[1] / h,
box4.startPoint[0] / w,
box4.endPoint[1] / h,
box4.endPoint[0] / w
]];
return tf15.image.cropAndResize(image25, boxes, [0], cropSize);
return tf15.image.cropAndResize(image24, boxes, [0], cropSize);
}
function scaleBoxCoordinates2(box4, factor) {
const startPoint = [box4.startPoint[0] * factor[0], box4.startPoint[1] * factor[1]];
@ -9027,13 +9022,13 @@ var HandPipeline = class {
Math.trunc(coord[2])
]);
}
async estimateHands(image25, config3) {
async estimateHands(image24, config3) {
let useFreshBox = false;
let boxes;
const skipTime = (config3.hand.skipTime || 0) > now() - lastTime8;
const skipFrame = this.skipped < (config3.hand.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
boxes = await this.handDetector.predict(image25, config3);
boxes = await this.handDetector.predict(image24, config3);
this.skipped = 0;
}
if (config3.skipAllowed)
@ -9052,8 +9047,8 @@ var HandPipeline = class {
if (config3.hand.landmarks) {
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
const palmCenter = getBoxCenter2(currentBox);
const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]];
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf17.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone();
const palmCenterNormalized = [palmCenter[0] / image24.shape[2], palmCenter[1] / image24.shape[1]];
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf17.image.rotateWithOffset(image24, angle, 0, palmCenterNormalized) : image24.clone();
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@ -9996,7 +9991,7 @@ async function load11(config3) {
inputSize7 = 256;
return model10;
}
async function parseSinglePose(res, config3, image25, inputBox) {
async function parseSinglePose(res, config3, image24, inputBox) {
const kpt4 = res[0][0];
const keypoints = [];
let score = 0;
@ -10012,15 +10007,15 @@ async function parseSinglePose(res, config3, image25, inputBox) {
part: kpt3[id],
positionRaw,
position: [
Math.round((image25.shape[2] || 0) * positionRaw[0]),
Math.round((image25.shape[1] || 0) * positionRaw[1])
Math.round((image24.shape[2] || 0) * positionRaw[0]),
Math.round((image24.shape[1] || 0) * positionRaw[1])
]
});
}
}
score = keypoints.reduce((prev, curr) => curr.score > prev ? curr.score : prev, 0);
const bodies = [];
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
const annotations2 = {};
for (const [name, indexes] of Object.entries(connected3)) {
const pt = [];
@ -10037,7 +10032,7 @@ async function parseSinglePose(res, config3, image25, inputBox) {
bodies.push(body4);
return bodies;
}
async function parseMultiPose(res, config3, image25, inputBox) {
async function parseMultiPose(res, config3, image24, inputBox) {
const bodies = [];
for (let id = 0; id < res[0].length; id++) {
const kpt4 = res[0][id];
@ -10055,11 +10050,11 @@ async function parseMultiPose(res, config3, image25, inputBox) {
part: kpt3[i],
score: Math.round(100 * score) / 100,
positionRaw,
position: [Math.round((image25.shape[2] || 0) * positionRaw[0]), Math.round((image25.shape[1] || 0) * positionRaw[1])]
position: [Math.round((image24.shape[2] || 0) * positionRaw[0]), Math.round((image24.shape[1] || 0) * positionRaw[1])]
});
}
}
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
const annotations2 = {};
for (const [name, indexes] of Object.entries(connected3)) {
const pt = [];
@ -10112,7 +10107,7 @@ async function predict10(input, config3) {
// src/object/nanodet.ts
var tf22 = __toModule(require_tfjs_esm());
var model11;
var last4 = [];
var last5 = [];
var lastTime10 = 0;
var skipped11 = Number.MAX_SAFE_INTEGER;
var scaleBox = 2.5;
@ -10192,19 +10187,19 @@ async function process4(res, inputSize8, outputShape, config3) {
results = results.filter((_val, idx) => nmsIdx.includes(idx)).sort((a, b) => b.score - a.score);
return results;
}
async function predict11(image25, config3) {
async function predict11(image24, config3) {
const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
const skipFrame = skipped11 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last4.length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && last5.length > 0) {
skipped11++;
return last4;
return last5;
}
skipped11 = 0;
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last4;
return last5;
return new Promise(async (resolve) => {
const outputSize2 = [image25.shape[2], image25.shape[1]];
const resize = tf22.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false);
const outputSize2 = [image24.shape[2], image24.shape[1]];
const resize = tf22.image.resizeBilinear(image24, [model11.inputSize, model11.inputSize], false);
const norm = tf22.div(resize, 255);
const transpose = norm.transpose([0, 3, 1, 2]);
tf22.dispose(norm);
@ -10215,7 +10210,7 @@ async function predict11(image25, config3) {
lastTime10 = now();
tf22.dispose(transpose);
const obj = await process4(objectT, model11.inputSize, outputSize2, config3);
last4 = obj;
last5 = obj;
resolve(obj);
});
}
@ -10585,7 +10580,7 @@ async function process5(input, background, config3) {
busy = true;
if (!model13)
await load14(config3);
const inputImage = process2(input, config3);
const inputImage = await process2(input, config3);
const width = ((_a = inputImage.canvas) == null ? void 0 : _a.width) || 0;
const height = ((_b = inputImage.canvas) == null ? void 0 : _b.height) || 0;
if (!inputImage.tensor)
@ -10636,7 +10631,7 @@ async function process5(input, background, config3) {
let mergedCanvas = null;
if (background && compositeCanvas) {
mergedCanvas = canvas(width, height);
const bgImage = process2(background, config3);
const bgImage = await process2(background, config3);
tf24.dispose(bgImage.tensor);
const ctxMerge = mergedCanvas.getContext("2d");
ctxMerge.drawImage(bgImage.canvas, 0, 0, mergedCanvas.width, mergedCanvas.height);
@ -10879,6 +10874,7 @@ function registerCustomOps() {
kernelFunc: (op) => tf26.tidy(() => tf26.sub(op.inputs.a, tf26.mul(tf26.div(op.inputs.a, op.inputs.b), op.inputs.b)))
};
tf26.registerKernel(kernelMod);
env.kernels.push("mod");
}
if (!env.kernels.includes("floormod")) {
const kernelMod = {
@ -10887,8 +10883,8 @@ function registerCustomOps() {
kernelFunc: (op) => tf26.tidy(() => tf26.floorDiv(op.inputs.a / op.inputs.b) * op.inputs.b + tf26.mod(op.inputs.a, op.inputs.b))
};
tf26.registerKernel(kernelMod);
env.kernels.push("floormod");
}
env.updateBackend();
}
async function check(instance, force = false) {
instance.state = "backend";
@ -10975,7 +10971,7 @@ async function check(instance, force = false) {
await tf26.ready();
instance.performance.initBackend = Math.trunc(now() - timeStamp);
instance.config.backend = tf26.getBackend();
env.updateBackend();
await env.updateBackend();
registerCustomOps();
}
return true;
@ -11674,8 +11670,9 @@ var body2 = (res) => {
gestures.push({ body: i, gesture: "raise right hand" });
const leftShoulder = res[i].keypoints.find((a) => a.part === "leftShoulder");
const rightShoulder = res[i].keypoints.find((a) => a.part === "rightShoulder");
if (leftShoulder && rightShoulder)
if (leftShoulder && rightShoulder && Math.abs(leftShoulder.positionRaw[1] - rightShoulder.positionRaw[1]) > 0.1) {
gestures.push({ body: i, gesture: `leaning ${leftShoulder.position[1] > rightShoulder.position[1] ? "left" : "right"}` });
}
}
return gestures;
};
@ -13013,7 +13010,7 @@ var Human = class {
await this.load();
timeStamp = now();
this.state = "image";
const img = process2(input, this.config);
const img = await process2(input, this.config);
this.process = img;
this.performance.inputProcess = this.env.perfadd ? (this.performance.inputProcess || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Get Image:");
@ -13033,7 +13030,7 @@ var Human = class {
this.performance.totalFrames++;
if (this.config.skipAllowed)
this.performance.cachedFrames++;
this.performance.inputCheck = this.env.perfadd ? (this.performance.inputCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.performance.cacheCheck = this.env.perfadd ? (this.performance.cacheCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Check Changed:");
let faceRes = [];
let bodyRes = [];

View File

@ -153,6 +153,7 @@ var config = {
warmup: "full",
cacheSensitivity: 0.7,
skipAllowed: false,
deallocate: false,
filter: {
enabled: true,
equalization: false,
@ -919,8 +920,8 @@ function GLImageFilter() {
this.get = function() {
return filterChain;
};
this.apply = function(image25) {
resize(image25.width, image25.height);
this.apply = function(image24) {
resize(image24.width, image24.height);
drawCount = 0;
if (!sourceTexture)
sourceTexture = gl.createTexture();
@ -929,7 +930,7 @@ function GLImageFilter() {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image25);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image24);
for (let i = 0; i < filterChain.length; i++) {
lastInChain = i === filterChain.length - 1;
const f = filterChain[i];
@ -937,25 +938,28 @@ function GLImageFilter() {
}
return fxcanvas;
};
this.draw = function(image25) {
this.draw = function(image24) {
this.add("brightness", 0);
return this.apply(image25);
return this.apply(image24);
};
}
// src/image/enhance.ts
var tf = __toModule(require_tfjs_esm());
function histogramEqualization(input) {
const channels = tf.split(input, 3, 2);
async function histogramEqualization(inputImage) {
const squeeze9 = inputImage.shape.length === 4 ? tf.squeeze(inputImage) : inputImage;
const channels = tf.split(squeeze9, 3, 2);
const min2 = [tf.min(channels[0]), tf.min(channels[1]), tf.min(channels[2])];
const max4 = [tf.max(channels[0]), tf.max(channels[1]), tf.max(channels[2])];
const sub6 = [tf.sub(channels[0], min2[0]), tf.sub(channels[1], min2[1]), tf.sub(channels[2], min2[2])];
const absMax = await Promise.all(max4.map((channel) => channel.data()));
const maxValue = 0.99 * Math.max(absMax[0][0], absMax[1][0], absMax[2][0]);
const sub7 = [tf.sub(channels[0], min2[0]), tf.sub(channels[1], min2[1]), tf.sub(channels[2], min2[2])];
const range = [tf.sub(max4[0], min2[0]), tf.sub(max4[1], min2[1]), tf.sub(max4[2], min2[2])];
const fact = [tf.div(255, range[0]), tf.div(255, range[1]), tf.div(255, range[2])];
const enh = [tf.mul(sub6[0], fact[0]), tf.mul(sub6[1], fact[1]), tf.mul(sub6[2], fact[2])];
const fact = [tf.div(maxValue, range[0]), tf.div(maxValue, range[1]), tf.div(maxValue, range[2])];
const enh = [tf.mul(sub7[0], fact[0]), tf.mul(sub7[1], fact[1]), tf.mul(sub7[2], fact[2])];
const rgb2 = tf.stack([enh[0], enh[1], enh[2]], 2);
const reshape8 = tf.reshape(rgb2, [1, input.shape[0], input.shape[1], 3]);
tf.dispose([...channels, ...min2, ...max4, ...sub6, ...range, ...fact, ...enh, rgb2]);
const reshape8 = tf.reshape(rgb2, [1, squeeze9.shape[0], squeeze9.shape[1], 3]);
tf.dispose([...channels, ...min2, ...max4, ...sub7, ...range, ...fact, ...enh, rgb2, squeeze9]);
return reshape8;
}
@ -965,6 +969,12 @@ var inCanvas = null;
var outCanvas = null;
var tmpCanvas = null;
var fx;
var last = {
inputSum: 0,
cacheDiff: 1,
sumMethod: 0,
inputTensor: void 0
};
function canvas(width, height) {
let c;
if (env.browser) {
@ -991,7 +1001,7 @@ function copy(input, output) {
ctx.drawImage(input, 0, 0);
return outputCanvas;
}
function process2(input, config3, getTensor = true) {
async function process2(input, config3, getTensor = true) {
if (!input) {
if (config3.debug)
log("input is missing");
@ -1041,7 +1051,7 @@ function process2(input, config3, getTensor = true) {
targetHeight = originalHeight * ((config3.filter.width || 0) / originalWidth);
if (!targetWidth || !targetHeight)
throw new Error("input cannot determine dimension");
if (!inCanvas || inCanvas.width !== targetWidth || inCanvas.height !== targetHeight)
if (!inCanvas || (inCanvas == null ? void 0 : inCanvas.width) !== targetWidth || (inCanvas == null ? void 0 : inCanvas.height) !== targetHeight)
inCanvas = canvas(targetWidth, targetHeight);
const inCtx = inCanvas.getContext("2d");
if (typeof ImageData !== "undefined" && input instanceof ImageData) {
@ -1050,13 +1060,13 @@ function process2(input, config3, getTensor = true) {
if (config3.filter.flip && typeof inCtx.translate !== "undefined") {
inCtx.translate(originalWidth, 0);
inCtx.scale(-1, 1);
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas == null ? void 0 : inCanvas.width, inCanvas == null ? void 0 : inCanvas.height);
inCtx.setTransform(1, 0, 0, 1, 0, 0);
} else {
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas == null ? void 0 : inCanvas.width, inCanvas == null ? void 0 : inCanvas.height);
}
}
if (!outCanvas || inCanvas.width !== outCanvas.width || inCanvas.height !== outCanvas.height)
if (!outCanvas || inCanvas.width !== outCanvas.width || (inCanvas == null ? void 0 : inCanvas.height) !== (outCanvas == null ? void 0 : outCanvas.height))
outCanvas = canvas(inCanvas.width, inCanvas.height);
if (config3.filter.enabled && env.webgl.supported) {
if (!fx)
@ -1144,51 +1154,31 @@ function process2(input, config3, getTensor = true) {
if (!pixels)
throw new Error("cannot create tensor from input");
const casted = tf2.cast(pixels, "float32");
const tensor3 = config3.filter.equalization ? histogramEqualization(casted) : tf2.expandDims(casted, 0);
const tensor3 = config3.filter.equalization ? await histogramEqualization(casted) : tf2.expandDims(casted, 0);
tf2.dispose([pixels, casted]);
return { tensor: tensor3, canvas: config3.filter.return ? outCanvas : null };
}
}
var lastInputSum = 0;
var lastCacheDiff = 1;
var benchmarked = 0;
var checksum = async (input) => {
const resizeFact = 48;
const reduced = tf2.image.resizeBilinear(input, [Math.trunc((input.shape[1] || 1) / resizeFact), Math.trunc((input.shape[2] || 1) / resizeFact)]);
const tfSum = async () => {
const sumT = tf2.sum(reduced);
const sum0 = await sumT.data();
tf2.dispose(sumT);
return sum0[0];
};
const jsSum = async () => {
const reducedData = await reduced.data();
let sum0 = 0;
for (let i = 0; i < reducedData.length / 3; i++)
sum0 += reducedData[3 * i + 2];
return sum0;
};
if (benchmarked === 0) {
const t0 = now();
await jsSum();
const t1 = now();
await tfSum();
const t2 = now();
benchmarked = t1 - t0 < t2 - t1 ? 1 : 2;
}
const res = benchmarked === 1 ? await jsSum() : await tfSum();
tf2.dispose(reduced);
return res;
};
async function skip(config3, input) {
let skipFrame = false;
if (config3.cacheSensitivity === 0)
return false;
const sum2 = await checksum(input);
const diff = 100 * (Math.max(sum2, lastInputSum) / Math.min(sum2, lastInputSum) - 1);
lastInputSum = sum2;
let skipFrame = diff < Math.max(config3.cacheSensitivity, lastCacheDiff);
lastCacheDiff = diff > 10 * config3.cacheSensitivity ? 0 : diff;
skipFrame = skipFrame && lastCacheDiff > 0;
return skipFrame;
if (!last.inputTensor) {
last.inputTensor = tf2.clone(input);
} else if (last.inputTensor.shape[1] !== input.shape[1] || last.inputTensor.shape[2] !== input.shape[2]) {
tf2.dispose(last.inputTensor);
last.inputTensor = tf2.clone(input);
} else {
const t = {};
t.diff = tf2.sub(input, last.inputTensor);
t.squared = tf2.mul(t.diff, t.diff);
t.sum = tf2.sum(t.squared);
const diffSum = await t.sum.data();
const diffRelative = diffSum[0] / (input.shape[1] || 1) / (input.shape[2] || 1) / 255 / 3;
tf2.dispose([last.inputTensor, t.diff, t.squared, t.sum]);
last.inputTensor = tf2.clone(input);
skipFrame = diffRelative <= config3.cacheSensitivity;
}
return skipFrame;
}
@ -1350,7 +1340,7 @@ async function load2(config3) {
log("cached model:", model2["modelUrl"]);
return model2;
}
async function predict(image25, config3, idx, count2) {
async function predict(image24, config3, idx, count2) {
var _a, _b;
if (!model2)
return null;
@ -1362,7 +1352,7 @@ async function predict(image25, config3, idx, count2) {
}
skipped2 = 0;
return new Promise(async (resolve) => {
const resize = tf5.image.resizeBilinear(image25, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
const resize = tf5.image.resizeBilinear(image24, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
const res = model2 == null ? void 0 : model2.execute(resize);
const num = (await res.data())[0];
cached[idx] = Math.round(100 * num) / 100;
@ -4682,10 +4672,10 @@ var scaleBoxCoordinates = (box4, factor) => {
const endPoint = [box4.endPoint[0] * factor[0], box4.endPoint[1] * factor[1]];
return { startPoint, endPoint, landmarks: box4.landmarks, confidence: box4.confidence };
};
var cutBoxFromImageAndResize = (box4, image25, cropSize) => {
const h = image25.shape[1];
const w = image25.shape[2];
const crop2 = tf6.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
var cutBoxFromImageAndResize = (box4, image24, cropSize) => {
const h = image24.shape[1];
const w = image24.shape[2];
const crop2 = tf6.image.cropAndResize(image24, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const norm = tf6.div(crop2, 255);
tf6.dispose(crop2);
return norm;
@ -5190,7 +5180,7 @@ var labels = [
// src/object/centernet.ts
var model4;
var inputSize3 = 0;
var last = [];
var last2 = [];
var lastTime3 = 0;
var skipped4 = Number.MAX_SAFE_INTEGER;
async function load4(config3) {
@ -5257,9 +5247,9 @@ async function process3(res, outputShape, config3) {
async function predict3(input, config3) {
const skipTime = (config3.object.skipTime || 0) > now() - lastTime3;
const skipFrame = skipped4 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last.length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && last2.length > 0) {
skipped4++;
return last;
return last2;
}
skipped4 = 0;
return new Promise(async (resolve) => {
@ -5269,7 +5259,7 @@ async function predict3(input, config3) {
lastTime3 = now();
tf9.dispose(resize);
const obj = await process3(objectT, outputSize2, config3);
last = obj;
last2 = obj;
resolve(obj);
});
}
@ -5342,7 +5332,7 @@ function max2d(inputs, minScore) {
return [0, 0, newScore];
});
}
async function predict4(image25, config3) {
async function predict4(image24, config3) {
const skipTime = (config3.body.skipTime || 0) > now() - lastTime4;
const skipFrame = skipped5 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && Object.keys(cache2.keypoints).length > 0) {
@ -5355,7 +5345,7 @@ async function predict4(image25, config3) {
const tensor3 = tf10.tidy(() => {
if (!(model5 == null ? void 0 : model5.inputs[0].shape))
return null;
const resize = tf10.image.resizeBilinear(image25, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
const resize = tf10.image.resizeBilinear(image24, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
const enhance3 = tf10.mul(resize, 2);
const norm = enhance3.sub(1);
return norm;
@ -5367,10 +5357,10 @@ async function predict4(image25, config3) {
tf10.dispose(tensor3);
if (resT) {
cache2.keypoints.length = 0;
const squeeze8 = resT.squeeze();
const squeeze9 = resT.squeeze();
tf10.dispose(resT);
const stack4 = squeeze8.unstack(2);
tf10.dispose(squeeze8);
const stack4 = squeeze9.unstack(2);
tf10.dispose(squeeze9);
for (let id = 0; id < stack4.length; id++) {
const [x2, y2, partScore] = max2d(stack4[id], config3.body.minConfidence);
if (partScore > (((_a = config3.body) == null ? void 0 : _a.minConfidence) || 0)) {
@ -5382,8 +5372,8 @@ async function predict4(image25, config3) {
y2 / model5.inputs[0].shape[1]
],
position: [
Math.round(image25.shape[2] * x2 / model5.inputs[0].shape[2]),
Math.round(image25.shape[1] * y2 / model5.inputs[0].shape[1])
Math.round(image24.shape[2] * x2 / model5.inputs[0].shape[2]),
Math.round(image24.shape[1] * y2 / model5.inputs[0].shape[1])
]
});
}
@ -5425,7 +5415,7 @@ async function predict4(image25, config3) {
var tf11 = __toModule(require_tfjs_esm());
var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"];
var model6;
var last2 = [];
var last3 = [];
var lastCount2 = 0;
var lastTime5 = 0;
var skipped6 = Number.MAX_SAFE_INTEGER;
@ -5444,15 +5434,15 @@ async function load6(config3) {
log("cached model:", model6["modelUrl"]);
return model6;
}
async function predict5(image25, config3, idx, count2) {
async function predict5(image24, config3, idx, count2) {
var _a, _b;
if (!model6)
return null;
const skipFrame = skipped6 < (((_a = config3.face.emotion) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime5;
if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last2[idx] && last2[idx].length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last3[idx] && last3[idx].length > 0) {
skipped6++;
return last2[idx];
return last3[idx];
}
skipped6 = 0;
return new Promise(async (resolve) => {
@ -5461,7 +5451,7 @@ async function predict5(image25, config3, idx, count2) {
if ((_a2 = config3.face.emotion) == null ? void 0 : _a2.enabled) {
const t = {};
const inputSize8 = (model6 == null ? void 0 : model6.inputs[0].shape) ? model6.inputs[0].shape[2] : 0;
t.resize = tf11.image.resizeBilinear(image25, [inputSize8, inputSize8], false);
t.resize = tf11.image.resizeBilinear(image24, [inputSize8, inputSize8], false);
[t.red, t.green, t.blue] = tf11.split(t.resize, 3, 3);
t.redNorm = tf11.mul(t.red, rgb[0]);
t.greenNorm = tf11.mul(t.green, rgb[1]);
@ -5479,7 +5469,7 @@ async function predict5(image25, config3, idx, count2) {
obj.sort((a, b) => b.score - a.score);
Object.keys(t).forEach((tensor3) => tf11.dispose(t[tensor3]));
}
last2[idx] = obj;
last3[idx] = obj;
lastCount2 = count2;
resolve(obj);
});
@ -5629,7 +5619,7 @@ var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0;
var enlargeFact = 1.6;
async function predict6(input, config3) {
var _a, _b, _c, _d, _e, _f, _g, _h;
var _a, _b, _c, _d, _e, _f, _g, _h, _i;
const skipTime = (((_a = config3.face.detector) == null ? void 0 : _a.skipTime) || 0) > now() - lastTime6;
const skipFrame = skipped7 < (((_b = config3.face.detector) == null ? void 0 : _b.skipFrames) || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || boxCache.length === 0) {
@ -5673,8 +5663,13 @@ async function predict6(input, config3) {
rotationMatrix = fixedRotationMatrix;
face5.tensor = cutBoxFromImageAndResize(box4, input, ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) ? [inputSize5, inputSize5] : [size(), size()]);
}
if ((_f = config3 == null ? void 0 : config3.filter) == null ? void 0 : _f.equalization) {
const equilized = await histogramEqualization(face5.tensor);
tf13.dispose(face5.tensor);
face5.tensor = equilized;
}
face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!((_f = config3.face.mesh) == null ? void 0 : _f.enabled)) {
if (!((_g = config3.face.mesh) == null ? void 0 : _g.enabled)) {
face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input);
face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
@ -5696,10 +5691,10 @@ async function predict6(input, config3) {
const coordsReshaped = tf13.reshape(contourCoords, [-1, 3]);
let rawCoords = await coordsReshaped.array();
tf13.dispose([contourCoords, coordsReshaped, confidence, contours]);
if (face5.faceScore < (((_g = config3.face.detector) == null ? void 0 : _g.minConfidence) || 1)) {
if (face5.faceScore < (((_h = config3.face.detector) == null ? void 0 : _h.minConfidence) || 1)) {
box4.confidence = face5.faceScore;
} else {
if ((_h = config3.face.iris) == null ? void 0 : _h.enabled)
if ((_i = config3.face.iris) == null ? void 0 : _i.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
@ -5740,7 +5735,7 @@ var uvmap = UV468;
// src/face/faceres.ts
var tf14 = __toModule(require_tfjs_esm());
var model9;
var last3 = [];
var last4 = [];
var lastTime7 = 0;
var lastCount3 = 0;
var skipped8 = Number.MAX_SAFE_INTEGER;
@ -5768,15 +5763,15 @@ function enhance2(input) {
tf14.dispose(crop2);
return norm;
}
async function predict7(image25, config3, idx, count2) {
async function predict7(image24, config3, idx, count2) {
var _a, _b, _c, _d;
if (!model9)
return null;
const skipFrame = skipped8 < (((_a = config3.face.description) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime7;
if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c = last3[idx]) == null ? void 0 : _c.age) && ((_d = last3[idx]) == null ? void 0 : _d.age) > 0) {
if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c = last4[idx]) == null ? void 0 : _c.age) && ((_d = last4[idx]) == null ? void 0 : _d.age) > 0) {
skipped8++;
return last3[idx];
return last4[idx];
}
skipped8 = 0;
return new Promise(async (resolve) => {
@ -5788,7 +5783,7 @@ async function predict7(image25, config3, idx, count2) {
descriptor: []
};
if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) {
const enhanced = enhance2(image25);
const enhanced = enhance2(image24);
const resT = model9 == null ? void 0 : model9.execute(enhanced);
lastTime7 = now();
tf14.dispose(enhanced);
@ -5810,7 +5805,7 @@ async function predict7(image25, config3, idx, count2) {
obj.descriptor = Array.from(descriptor);
resT.forEach((t) => tf14.dispose(t));
}
last3[idx] = obj;
last4[idx] = obj;
lastCount3 = count2;
resolve(obj);
});
@ -5836,16 +5831,16 @@ function getBoxCenter2(box4) {
box4.startPoint[1] + (box4.endPoint[1] - box4.startPoint[1]) / 2
];
}
function cutBoxFromImageAndResize2(box4, image25, cropSize) {
const h = image25.shape[1];
const w = image25.shape[2];
function cutBoxFromImageAndResize2(box4, image24, cropSize) {
const h = image24.shape[1];
const w = image24.shape[2];
const boxes = [[
box4.startPoint[1] / h,
box4.startPoint[0] / w,
box4.endPoint[1] / h,
box4.endPoint[0] / w
]];
return tf15.image.cropAndResize(image25, boxes, [0], cropSize);
return tf15.image.cropAndResize(image24, boxes, [0], cropSize);
}
function scaleBoxCoordinates2(box4, factor) {
const startPoint = [box4.startPoint[0] * factor[0], box4.startPoint[1] * factor[1]];
@ -9028,13 +9023,13 @@ var HandPipeline = class {
Math.trunc(coord[2])
]);
}
async estimateHands(image25, config3) {
async estimateHands(image24, config3) {
let useFreshBox = false;
let boxes;
const skipTime = (config3.hand.skipTime || 0) > now() - lastTime8;
const skipFrame = this.skipped < (config3.hand.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
boxes = await this.handDetector.predict(image25, config3);
boxes = await this.handDetector.predict(image24, config3);
this.skipped = 0;
}
if (config3.skipAllowed)
@ -9053,8 +9048,8 @@ var HandPipeline = class {
if (config3.hand.landmarks) {
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
const palmCenter = getBoxCenter2(currentBox);
const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]];
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf17.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone();
const palmCenterNormalized = [palmCenter[0] / image24.shape[2], palmCenter[1] / image24.shape[1]];
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf17.image.rotateWithOffset(image24, angle, 0, palmCenterNormalized) : image24.clone();
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@ -9997,7 +9992,7 @@ async function load11(config3) {
inputSize7 = 256;
return model10;
}
async function parseSinglePose(res, config3, image25, inputBox) {
async function parseSinglePose(res, config3, image24, inputBox) {
const kpt4 = res[0][0];
const keypoints = [];
let score = 0;
@ -10013,15 +10008,15 @@ async function parseSinglePose(res, config3, image25, inputBox) {
part: kpt3[id],
positionRaw,
position: [
Math.round((image25.shape[2] || 0) * positionRaw[0]),
Math.round((image25.shape[1] || 0) * positionRaw[1])
Math.round((image24.shape[2] || 0) * positionRaw[0]),
Math.round((image24.shape[1] || 0) * positionRaw[1])
]
});
}
}
score = keypoints.reduce((prev, curr) => curr.score > prev ? curr.score : prev, 0);
const bodies = [];
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
const annotations2 = {};
for (const [name, indexes] of Object.entries(connected3)) {
const pt = [];
@ -10038,7 +10033,7 @@ async function parseSinglePose(res, config3, image25, inputBox) {
bodies.push(body4);
return bodies;
}
async function parseMultiPose(res, config3, image25, inputBox) {
async function parseMultiPose(res, config3, image24, inputBox) {
const bodies = [];
for (let id = 0; id < res[0].length; id++) {
const kpt4 = res[0][id];
@ -10056,11 +10051,11 @@ async function parseMultiPose(res, config3, image25, inputBox) {
part: kpt3[i],
score: Math.round(100 * score) / 100,
positionRaw,
position: [Math.round((image25.shape[2] || 0) * positionRaw[0]), Math.round((image25.shape[1] || 0) * positionRaw[1])]
position: [Math.round((image24.shape[2] || 0) * positionRaw[0]), Math.round((image24.shape[1] || 0) * positionRaw[1])]
});
}
}
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
const annotations2 = {};
for (const [name, indexes] of Object.entries(connected3)) {
const pt = [];
@ -10113,7 +10108,7 @@ async function predict10(input, config3) {
// src/object/nanodet.ts
var tf22 = __toModule(require_tfjs_esm());
var model11;
var last4 = [];
var last5 = [];
var lastTime10 = 0;
var skipped11 = Number.MAX_SAFE_INTEGER;
var scaleBox = 2.5;
@ -10193,19 +10188,19 @@ async function process4(res, inputSize8, outputShape, config3) {
results = results.filter((_val, idx) => nmsIdx.includes(idx)).sort((a, b) => b.score - a.score);
return results;
}
async function predict11(image25, config3) {
async function predict11(image24, config3) {
const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
const skipFrame = skipped11 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last4.length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && last5.length > 0) {
skipped11++;
return last4;
return last5;
}
skipped11 = 0;
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last4;
return last5;
return new Promise(async (resolve) => {
const outputSize2 = [image25.shape[2], image25.shape[1]];
const resize = tf22.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false);
const outputSize2 = [image24.shape[2], image24.shape[1]];
const resize = tf22.image.resizeBilinear(image24, [model11.inputSize, model11.inputSize], false);
const norm = tf22.div(resize, 255);
const transpose = norm.transpose([0, 3, 1, 2]);
tf22.dispose(norm);
@ -10216,7 +10211,7 @@ async function predict11(image25, config3) {
lastTime10 = now();
tf22.dispose(transpose);
const obj = await process4(objectT, model11.inputSize, outputSize2, config3);
last4 = obj;
last5 = obj;
resolve(obj);
});
}
@ -10586,7 +10581,7 @@ async function process5(input, background, config3) {
busy = true;
if (!model13)
await load14(config3);
const inputImage = process2(input, config3);
const inputImage = await process2(input, config3);
const width = ((_a = inputImage.canvas) == null ? void 0 : _a.width) || 0;
const height = ((_b = inputImage.canvas) == null ? void 0 : _b.height) || 0;
if (!inputImage.tensor)
@ -10637,7 +10632,7 @@ async function process5(input, background, config3) {
let mergedCanvas = null;
if (background && compositeCanvas) {
mergedCanvas = canvas(width, height);
const bgImage = process2(background, config3);
const bgImage = await process2(background, config3);
tf24.dispose(bgImage.tensor);
const ctxMerge = mergedCanvas.getContext("2d");
ctxMerge.drawImage(bgImage.canvas, 0, 0, mergedCanvas.width, mergedCanvas.height);
@ -10880,6 +10875,7 @@ function registerCustomOps() {
kernelFunc: (op) => tf26.tidy(() => tf26.sub(op.inputs.a, tf26.mul(tf26.div(op.inputs.a, op.inputs.b), op.inputs.b)))
};
tf26.registerKernel(kernelMod);
env.kernels.push("mod");
}
if (!env.kernels.includes("floormod")) {
const kernelMod = {
@ -10888,8 +10884,8 @@ function registerCustomOps() {
kernelFunc: (op) => tf26.tidy(() => tf26.floorDiv(op.inputs.a / op.inputs.b) * op.inputs.b + tf26.mod(op.inputs.a, op.inputs.b))
};
tf26.registerKernel(kernelMod);
env.kernels.push("floormod");
}
env.updateBackend();
}
async function check(instance, force = false) {
instance.state = "backend";
@ -10976,7 +10972,7 @@ async function check(instance, force = false) {
await tf26.ready();
instance.performance.initBackend = Math.trunc(now() - timeStamp);
instance.config.backend = tf26.getBackend();
env.updateBackend();
await env.updateBackend();
registerCustomOps();
}
return true;
@ -11675,8 +11671,9 @@ var body2 = (res) => {
gestures.push({ body: i, gesture: "raise right hand" });
const leftShoulder = res[i].keypoints.find((a) => a.part === "leftShoulder");
const rightShoulder = res[i].keypoints.find((a) => a.part === "rightShoulder");
if (leftShoulder && rightShoulder)
if (leftShoulder && rightShoulder && Math.abs(leftShoulder.positionRaw[1] - rightShoulder.positionRaw[1]) > 0.1) {
gestures.push({ body: i, gesture: `leaning ${leftShoulder.position[1] > rightShoulder.position[1] ? "left" : "right"}` });
}
}
return gestures;
};
@ -13014,7 +13011,7 @@ var Human = class {
await this.load();
timeStamp = now();
this.state = "image";
const img = process2(input, this.config);
const img = await process2(input, this.config);
this.process = img;
this.performance.inputProcess = this.env.perfadd ? (this.performance.inputProcess || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Get Image:");
@ -13034,7 +13031,7 @@ var Human = class {
this.performance.totalFrames++;
if (this.config.skipAllowed)
this.performance.cachedFrames++;
this.performance.inputCheck = this.env.perfadd ? (this.performance.inputCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.performance.cacheCheck = this.env.perfadd ? (this.performance.cacheCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Check Changed:");
let faceRes = [];
let bodyRes = [];

235
dist/human.node.js vendored
View File

@ -152,6 +152,7 @@ var config = {
warmup: "full",
cacheSensitivity: 0.7,
skipAllowed: false,
deallocate: false,
filter: {
enabled: true,
equalization: false,
@ -918,8 +919,8 @@ function GLImageFilter() {
this.get = function() {
return filterChain;
};
this.apply = function(image25) {
resize(image25.width, image25.height);
this.apply = function(image24) {
resize(image24.width, image24.height);
drawCount = 0;
if (!sourceTexture)
sourceTexture = gl.createTexture();
@ -928,7 +929,7 @@ function GLImageFilter() {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image25);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image24);
for (let i = 0; i < filterChain.length; i++) {
lastInChain = i === filterChain.length - 1;
const f = filterChain[i];
@ -936,25 +937,28 @@ function GLImageFilter() {
}
return fxcanvas;
};
this.draw = function(image25) {
this.draw = function(image24) {
this.add("brightness", 0);
return this.apply(image25);
return this.apply(image24);
};
}
// src/image/enhance.ts
var tf = __toModule(require_tfjs_esm());
function histogramEqualization(input) {
const channels = tf.split(input, 3, 2);
async function histogramEqualization(inputImage) {
const squeeze9 = inputImage.shape.length === 4 ? tf.squeeze(inputImage) : inputImage;
const channels = tf.split(squeeze9, 3, 2);
const min2 = [tf.min(channels[0]), tf.min(channels[1]), tf.min(channels[2])];
const max4 = [tf.max(channels[0]), tf.max(channels[1]), tf.max(channels[2])];
const sub6 = [tf.sub(channels[0], min2[0]), tf.sub(channels[1], min2[1]), tf.sub(channels[2], min2[2])];
const absMax = await Promise.all(max4.map((channel) => channel.data()));
const maxValue = 0.99 * Math.max(absMax[0][0], absMax[1][0], absMax[2][0]);
const sub7 = [tf.sub(channels[0], min2[0]), tf.sub(channels[1], min2[1]), tf.sub(channels[2], min2[2])];
const range = [tf.sub(max4[0], min2[0]), tf.sub(max4[1], min2[1]), tf.sub(max4[2], min2[2])];
const fact = [tf.div(255, range[0]), tf.div(255, range[1]), tf.div(255, range[2])];
const enh = [tf.mul(sub6[0], fact[0]), tf.mul(sub6[1], fact[1]), tf.mul(sub6[2], fact[2])];
const fact = [tf.div(maxValue, range[0]), tf.div(maxValue, range[1]), tf.div(maxValue, range[2])];
const enh = [tf.mul(sub7[0], fact[0]), tf.mul(sub7[1], fact[1]), tf.mul(sub7[2], fact[2])];
const rgb2 = tf.stack([enh[0], enh[1], enh[2]], 2);
const reshape8 = tf.reshape(rgb2, [1, input.shape[0], input.shape[1], 3]);
tf.dispose([...channels, ...min2, ...max4, ...sub6, ...range, ...fact, ...enh, rgb2]);
const reshape8 = tf.reshape(rgb2, [1, squeeze9.shape[0], squeeze9.shape[1], 3]);
tf.dispose([...channels, ...min2, ...max4, ...sub7, ...range, ...fact, ...enh, rgb2, squeeze9]);
return reshape8;
}
@ -964,6 +968,12 @@ var inCanvas = null;
var outCanvas = null;
var tmpCanvas = null;
var fx;
var last = {
inputSum: 0,
cacheDiff: 1,
sumMethod: 0,
inputTensor: void 0
};
function canvas(width, height) {
let c;
if (env.browser) {
@ -990,7 +1000,7 @@ function copy(input, output) {
ctx.drawImage(input, 0, 0);
return outputCanvas;
}
function process2(input, config3, getTensor = true) {
async function process2(input, config3, getTensor = true) {
if (!input) {
if (config3.debug)
log("input is missing");
@ -1040,7 +1050,7 @@ function process2(input, config3, getTensor = true) {
targetHeight = originalHeight * ((config3.filter.width || 0) / originalWidth);
if (!targetWidth || !targetHeight)
throw new Error("input cannot determine dimension");
if (!inCanvas || inCanvas.width !== targetWidth || inCanvas.height !== targetHeight)
if (!inCanvas || (inCanvas == null ? void 0 : inCanvas.width) !== targetWidth || (inCanvas == null ? void 0 : inCanvas.height) !== targetHeight)
inCanvas = canvas(targetWidth, targetHeight);
const inCtx = inCanvas.getContext("2d");
if (typeof ImageData !== "undefined" && input instanceof ImageData) {
@ -1049,13 +1059,13 @@ function process2(input, config3, getTensor = true) {
if (config3.filter.flip && typeof inCtx.translate !== "undefined") {
inCtx.translate(originalWidth, 0);
inCtx.scale(-1, 1);
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas == null ? void 0 : inCanvas.width, inCanvas == null ? void 0 : inCanvas.height);
inCtx.setTransform(1, 0, 0, 1, 0, 0);
} else {
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
inCtx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas == null ? void 0 : inCanvas.width, inCanvas == null ? void 0 : inCanvas.height);
}
}
if (!outCanvas || inCanvas.width !== outCanvas.width || inCanvas.height !== outCanvas.height)
if (!outCanvas || inCanvas.width !== outCanvas.width || (inCanvas == null ? void 0 : inCanvas.height) !== (outCanvas == null ? void 0 : outCanvas.height))
outCanvas = canvas(inCanvas.width, inCanvas.height);
if (config3.filter.enabled && env.webgl.supported) {
if (!fx)
@ -1143,51 +1153,31 @@ function process2(input, config3, getTensor = true) {
if (!pixels)
throw new Error("cannot create tensor from input");
const casted = tf2.cast(pixels, "float32");
const tensor3 = config3.filter.equalization ? histogramEqualization(casted) : tf2.expandDims(casted, 0);
const tensor3 = config3.filter.equalization ? await histogramEqualization(casted) : tf2.expandDims(casted, 0);
tf2.dispose([pixels, casted]);
return { tensor: tensor3, canvas: config3.filter.return ? outCanvas : null };
}
}
var lastInputSum = 0;
var lastCacheDiff = 1;
var benchmarked = 0;
var checksum = async (input) => {
const resizeFact = 48;
const reduced = tf2.image.resizeBilinear(input, [Math.trunc((input.shape[1] || 1) / resizeFact), Math.trunc((input.shape[2] || 1) / resizeFact)]);
const tfSum = async () => {
const sumT = tf2.sum(reduced);
const sum0 = await sumT.data();
tf2.dispose(sumT);
return sum0[0];
};
const jsSum = async () => {
const reducedData = await reduced.data();
let sum0 = 0;
for (let i = 0; i < reducedData.length / 3; i++)
sum0 += reducedData[3 * i + 2];
return sum0;
};
if (benchmarked === 0) {
const t0 = now();
await jsSum();
const t1 = now();
await tfSum();
const t2 = now();
benchmarked = t1 - t0 < t2 - t1 ? 1 : 2;
}
const res = benchmarked === 1 ? await jsSum() : await tfSum();
tf2.dispose(reduced);
return res;
};
async function skip(config3, input) {
let skipFrame = false;
if (config3.cacheSensitivity === 0)
return false;
const sum2 = await checksum(input);
const diff = 100 * (Math.max(sum2, lastInputSum) / Math.min(sum2, lastInputSum) - 1);
lastInputSum = sum2;
let skipFrame = diff < Math.max(config3.cacheSensitivity, lastCacheDiff);
lastCacheDiff = diff > 10 * config3.cacheSensitivity ? 0 : diff;
skipFrame = skipFrame && lastCacheDiff > 0;
return skipFrame;
if (!last.inputTensor) {
last.inputTensor = tf2.clone(input);
} else if (last.inputTensor.shape[1] !== input.shape[1] || last.inputTensor.shape[2] !== input.shape[2]) {
tf2.dispose(last.inputTensor);
last.inputTensor = tf2.clone(input);
} else {
const t = {};
t.diff = tf2.sub(input, last.inputTensor);
t.squared = tf2.mul(t.diff, t.diff);
t.sum = tf2.sum(t.squared);
const diffSum = await t.sum.data();
const diffRelative = diffSum[0] / (input.shape[1] || 1) / (input.shape[2] || 1) / 255 / 3;
tf2.dispose([last.inputTensor, t.diff, t.squared, t.sum]);
last.inputTensor = tf2.clone(input);
skipFrame = diffRelative <= config3.cacheSensitivity;
}
return skipFrame;
}
@ -1349,7 +1339,7 @@ async function load2(config3) {
log("cached model:", model2["modelUrl"]);
return model2;
}
async function predict(image25, config3, idx, count2) {
async function predict(image24, config3, idx, count2) {
var _a, _b;
if (!model2)
return null;
@ -1361,7 +1351,7 @@ async function predict(image25, config3, idx, count2) {
}
skipped2 = 0;
return new Promise(async (resolve) => {
const resize = tf5.image.resizeBilinear(image25, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
const resize = tf5.image.resizeBilinear(image24, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
const res = model2 == null ? void 0 : model2.execute(resize);
const num = (await res.data())[0];
cached[idx] = Math.round(100 * num) / 100;
@ -4681,10 +4671,10 @@ var scaleBoxCoordinates = (box4, factor) => {
const endPoint = [box4.endPoint[0] * factor[0], box4.endPoint[1] * factor[1]];
return { startPoint, endPoint, landmarks: box4.landmarks, confidence: box4.confidence };
};
var cutBoxFromImageAndResize = (box4, image25, cropSize) => {
const h = image25.shape[1];
const w = image25.shape[2];
const crop2 = tf6.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
var cutBoxFromImageAndResize = (box4, image24, cropSize) => {
const h = image24.shape[1];
const w = image24.shape[2];
const crop2 = tf6.image.cropAndResize(image24, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
const norm = tf6.div(crop2, 255);
tf6.dispose(crop2);
return norm;
@ -5189,7 +5179,7 @@ var labels = [
// src/object/centernet.ts
var model4;
var inputSize3 = 0;
var last = [];
var last2 = [];
var lastTime3 = 0;
var skipped4 = Number.MAX_SAFE_INTEGER;
async function load4(config3) {
@ -5256,9 +5246,9 @@ async function process3(res, outputShape, config3) {
async function predict3(input, config3) {
const skipTime = (config3.object.skipTime || 0) > now() - lastTime3;
const skipFrame = skipped4 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last.length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && last2.length > 0) {
skipped4++;
return last;
return last2;
}
skipped4 = 0;
return new Promise(async (resolve) => {
@ -5268,7 +5258,7 @@ async function predict3(input, config3) {
lastTime3 = now();
tf9.dispose(resize);
const obj = await process3(objectT, outputSize2, config3);
last = obj;
last2 = obj;
resolve(obj);
});
}
@ -5341,7 +5331,7 @@ function max2d(inputs, minScore) {
return [0, 0, newScore];
});
}
async function predict4(image25, config3) {
async function predict4(image24, config3) {
const skipTime = (config3.body.skipTime || 0) > now() - lastTime4;
const skipFrame = skipped5 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && Object.keys(cache2.keypoints).length > 0) {
@ -5354,7 +5344,7 @@ async function predict4(image25, config3) {
const tensor3 = tf10.tidy(() => {
if (!(model5 == null ? void 0 : model5.inputs[0].shape))
return null;
const resize = tf10.image.resizeBilinear(image25, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
const resize = tf10.image.resizeBilinear(image24, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
const enhance3 = tf10.mul(resize, 2);
const norm = enhance3.sub(1);
return norm;
@ -5366,10 +5356,10 @@ async function predict4(image25, config3) {
tf10.dispose(tensor3);
if (resT) {
cache2.keypoints.length = 0;
const squeeze8 = resT.squeeze();
const squeeze9 = resT.squeeze();
tf10.dispose(resT);
const stack4 = squeeze8.unstack(2);
tf10.dispose(squeeze8);
const stack4 = squeeze9.unstack(2);
tf10.dispose(squeeze9);
for (let id = 0; id < stack4.length; id++) {
const [x2, y2, partScore] = max2d(stack4[id], config3.body.minConfidence);
if (partScore > (((_a = config3.body) == null ? void 0 : _a.minConfidence) || 0)) {
@ -5381,8 +5371,8 @@ async function predict4(image25, config3) {
y2 / model5.inputs[0].shape[1]
],
position: [
Math.round(image25.shape[2] * x2 / model5.inputs[0].shape[2]),
Math.round(image25.shape[1] * y2 / model5.inputs[0].shape[1])
Math.round(image24.shape[2] * x2 / model5.inputs[0].shape[2]),
Math.round(image24.shape[1] * y2 / model5.inputs[0].shape[1])
]
});
}
@ -5424,7 +5414,7 @@ async function predict4(image25, config3) {
var tf11 = __toModule(require_tfjs_esm());
var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"];
var model6;
var last2 = [];
var last3 = [];
var lastCount2 = 0;
var lastTime5 = 0;
var skipped6 = Number.MAX_SAFE_INTEGER;
@ -5443,15 +5433,15 @@ async function load6(config3) {
log("cached model:", model6["modelUrl"]);
return model6;
}
async function predict5(image25, config3, idx, count2) {
async function predict5(image24, config3, idx, count2) {
var _a, _b;
if (!model6)
return null;
const skipFrame = skipped6 < (((_a = config3.face.emotion) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime5;
if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last2[idx] && last2[idx].length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last3[idx] && last3[idx].length > 0) {
skipped6++;
return last2[idx];
return last3[idx];
}
skipped6 = 0;
return new Promise(async (resolve) => {
@ -5460,7 +5450,7 @@ async function predict5(image25, config3, idx, count2) {
if ((_a2 = config3.face.emotion) == null ? void 0 : _a2.enabled) {
const t = {};
const inputSize8 = (model6 == null ? void 0 : model6.inputs[0].shape) ? model6.inputs[0].shape[2] : 0;
t.resize = tf11.image.resizeBilinear(image25, [inputSize8, inputSize8], false);
t.resize = tf11.image.resizeBilinear(image24, [inputSize8, inputSize8], false);
[t.red, t.green, t.blue] = tf11.split(t.resize, 3, 3);
t.redNorm = tf11.mul(t.red, rgb[0]);
t.greenNorm = tf11.mul(t.green, rgb[1]);
@ -5478,7 +5468,7 @@ async function predict5(image25, config3, idx, count2) {
obj.sort((a, b) => b.score - a.score);
Object.keys(t).forEach((tensor3) => tf11.dispose(t[tensor3]));
}
last2[idx] = obj;
last3[idx] = obj;
lastCount2 = count2;
resolve(obj);
});
@ -5628,7 +5618,7 @@ var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0;
var enlargeFact = 1.6;
async function predict6(input, config3) {
var _a, _b, _c, _d, _e, _f, _g, _h;
var _a, _b, _c, _d, _e, _f, _g, _h, _i;
const skipTime = (((_a = config3.face.detector) == null ? void 0 : _a.skipTime) || 0) > now() - lastTime6;
const skipFrame = skipped7 < (((_b = config3.face.detector) == null ? void 0 : _b.skipFrames) || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || boxCache.length === 0) {
@ -5672,8 +5662,13 @@ async function predict6(input, config3) {
rotationMatrix = fixedRotationMatrix;
face5.tensor = cutBoxFromImageAndResize(box4, input, ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) ? [inputSize5, inputSize5] : [size(), size()]);
}
if ((_f = config3 == null ? void 0 : config3.filter) == null ? void 0 : _f.equalization) {
const equilized = await histogramEqualization(face5.tensor);
tf13.dispose(face5.tensor);
face5.tensor = equilized;
}
face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!((_f = config3.face.mesh) == null ? void 0 : _f.enabled)) {
if (!((_g = config3.face.mesh) == null ? void 0 : _g.enabled)) {
face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input);
face5.boxScore = Math.round(100 * box4.confidence || 0) / 100;
@ -5695,10 +5690,10 @@ async function predict6(input, config3) {
const coordsReshaped = tf13.reshape(contourCoords, [-1, 3]);
let rawCoords = await coordsReshaped.array();
tf13.dispose([contourCoords, coordsReshaped, confidence, contours]);
if (face5.faceScore < (((_g = config3.face.detector) == null ? void 0 : _g.minConfidence) || 1)) {
if (face5.faceScore < (((_h = config3.face.detector) == null ? void 0 : _h.minConfidence) || 1)) {
box4.confidence = face5.faceScore;
} else {
if ((_h = config3.face.iris) == null ? void 0 : _h.enabled)
if ((_i = config3.face.iris) == null ? void 0 : _i.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
@ -5739,7 +5734,7 @@ var uvmap = UV468;
// src/face/faceres.ts
var tf14 = __toModule(require_tfjs_esm());
var model9;
var last3 = [];
var last4 = [];
var lastTime7 = 0;
var lastCount3 = 0;
var skipped8 = Number.MAX_SAFE_INTEGER;
@ -5767,15 +5762,15 @@ function enhance2(input) {
tf14.dispose(crop2);
return norm;
}
async function predict7(image25, config3, idx, count2) {
async function predict7(image24, config3, idx, count2) {
var _a, _b, _c, _d;
if (!model9)
return null;
const skipFrame = skipped8 < (((_a = config3.face.description) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime7;
if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c = last3[idx]) == null ? void 0 : _c.age) && ((_d = last3[idx]) == null ? void 0 : _d.age) > 0) {
if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c = last4[idx]) == null ? void 0 : _c.age) && ((_d = last4[idx]) == null ? void 0 : _d.age) > 0) {
skipped8++;
return last3[idx];
return last4[idx];
}
skipped8 = 0;
return new Promise(async (resolve) => {
@ -5787,7 +5782,7 @@ async function predict7(image25, config3, idx, count2) {
descriptor: []
};
if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) {
const enhanced = enhance2(image25);
const enhanced = enhance2(image24);
const resT = model9 == null ? void 0 : model9.execute(enhanced);
lastTime7 = now();
tf14.dispose(enhanced);
@ -5809,7 +5804,7 @@ async function predict7(image25, config3, idx, count2) {
obj.descriptor = Array.from(descriptor);
resT.forEach((t) => tf14.dispose(t));
}
last3[idx] = obj;
last4[idx] = obj;
lastCount3 = count2;
resolve(obj);
});
@ -5835,16 +5830,16 @@ function getBoxCenter2(box4) {
box4.startPoint[1] + (box4.endPoint[1] - box4.startPoint[1]) / 2
];
}
function cutBoxFromImageAndResize2(box4, image25, cropSize) {
const h = image25.shape[1];
const w = image25.shape[2];
function cutBoxFromImageAndResize2(box4, image24, cropSize) {
const h = image24.shape[1];
const w = image24.shape[2];
const boxes = [[
box4.startPoint[1] / h,
box4.startPoint[0] / w,
box4.endPoint[1] / h,
box4.endPoint[0] / w
]];
return tf15.image.cropAndResize(image25, boxes, [0], cropSize);
return tf15.image.cropAndResize(image24, boxes, [0], cropSize);
}
function scaleBoxCoordinates2(box4, factor) {
const startPoint = [box4.startPoint[0] * factor[0], box4.startPoint[1] * factor[1]];
@ -9027,13 +9022,13 @@ var HandPipeline = class {
Math.trunc(coord[2])
]);
}
async estimateHands(image25, config3) {
async estimateHands(image24, config3) {
let useFreshBox = false;
let boxes;
const skipTime = (config3.hand.skipTime || 0) > now() - lastTime8;
const skipFrame = this.skipped < (config3.hand.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
boxes = await this.handDetector.predict(image25, config3);
boxes = await this.handDetector.predict(image24, config3);
this.skipped = 0;
}
if (config3.skipAllowed)
@ -9052,8 +9047,8 @@ var HandPipeline = class {
if (config3.hand.landmarks) {
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
const palmCenter = getBoxCenter2(currentBox);
const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]];
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf17.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone();
const palmCenterNormalized = [palmCenter[0] / image24.shape[2], palmCenter[1] / image24.shape[1]];
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf17.image.rotateWithOffset(image24, angle, 0, palmCenterNormalized) : image24.clone();
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@ -9996,7 +9991,7 @@ async function load11(config3) {
inputSize7 = 256;
return model10;
}
async function parseSinglePose(res, config3, image25, inputBox) {
async function parseSinglePose(res, config3, image24, inputBox) {
const kpt4 = res[0][0];
const keypoints = [];
let score = 0;
@ -10012,15 +10007,15 @@ async function parseSinglePose(res, config3, image25, inputBox) {
part: kpt3[id],
positionRaw,
position: [
Math.round((image25.shape[2] || 0) * positionRaw[0]),
Math.round((image25.shape[1] || 0) * positionRaw[1])
Math.round((image24.shape[2] || 0) * positionRaw[0]),
Math.round((image24.shape[1] || 0) * positionRaw[1])
]
});
}
}
score = keypoints.reduce((prev, curr) => curr.score > prev ? curr.score : prev, 0);
const bodies = [];
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
const annotations2 = {};
for (const [name, indexes] of Object.entries(connected3)) {
const pt = [];
@ -10037,7 +10032,7 @@ async function parseSinglePose(res, config3, image25, inputBox) {
bodies.push(body4);
return bodies;
}
async function parseMultiPose(res, config3, image25, inputBox) {
async function parseMultiPose(res, config3, image24, inputBox) {
const bodies = [];
for (let id = 0; id < res[0].length; id++) {
const kpt4 = res[0][id];
@ -10055,11 +10050,11 @@ async function parseMultiPose(res, config3, image25, inputBox) {
part: kpt3[i],
score: Math.round(100 * score) / 100,
positionRaw,
position: [Math.round((image25.shape[2] || 0) * positionRaw[0]), Math.round((image25.shape[1] || 0) * positionRaw[1])]
position: [Math.round((image24.shape[2] || 0) * positionRaw[0]), Math.round((image24.shape[1] || 0) * positionRaw[1])]
});
}
}
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
const annotations2 = {};
for (const [name, indexes] of Object.entries(connected3)) {
const pt = [];
@ -10112,7 +10107,7 @@ async function predict10(input, config3) {
// src/object/nanodet.ts
var tf22 = __toModule(require_tfjs_esm());
var model11;
var last4 = [];
var last5 = [];
var lastTime10 = 0;
var skipped11 = Number.MAX_SAFE_INTEGER;
var scaleBox = 2.5;
@ -10192,19 +10187,19 @@ async function process4(res, inputSize8, outputShape, config3) {
results = results.filter((_val, idx) => nmsIdx.includes(idx)).sort((a, b) => b.score - a.score);
return results;
}
async function predict11(image25, config3) {
async function predict11(image24, config3) {
const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
const skipFrame = skipped11 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last4.length > 0) {
if (config3.skipAllowed && skipTime && skipFrame && last5.length > 0) {
skipped11++;
return last4;
return last5;
}
skipped11 = 0;
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last4;
return last5;
return new Promise(async (resolve) => {
const outputSize2 = [image25.shape[2], image25.shape[1]];
const resize = tf22.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false);
const outputSize2 = [image24.shape[2], image24.shape[1]];
const resize = tf22.image.resizeBilinear(image24, [model11.inputSize, model11.inputSize], false);
const norm = tf22.div(resize, 255);
const transpose = norm.transpose([0, 3, 1, 2]);
tf22.dispose(norm);
@ -10215,7 +10210,7 @@ async function predict11(image25, config3) {
lastTime10 = now();
tf22.dispose(transpose);
const obj = await process4(objectT, model11.inputSize, outputSize2, config3);
last4 = obj;
last5 = obj;
resolve(obj);
});
}
@ -10585,7 +10580,7 @@ async function process5(input, background, config3) {
busy = true;
if (!model13)
await load14(config3);
const inputImage = process2(input, config3);
const inputImage = await process2(input, config3);
const width = ((_a = inputImage.canvas) == null ? void 0 : _a.width) || 0;
const height = ((_b = inputImage.canvas) == null ? void 0 : _b.height) || 0;
if (!inputImage.tensor)
@ -10636,7 +10631,7 @@ async function process5(input, background, config3) {
let mergedCanvas = null;
if (background && compositeCanvas) {
mergedCanvas = canvas(width, height);
const bgImage = process2(background, config3);
const bgImage = await process2(background, config3);
tf24.dispose(bgImage.tensor);
const ctxMerge = mergedCanvas.getContext("2d");
ctxMerge.drawImage(bgImage.canvas, 0, 0, mergedCanvas.width, mergedCanvas.height);
@ -10879,6 +10874,7 @@ function registerCustomOps() {
kernelFunc: (op) => tf26.tidy(() => tf26.sub(op.inputs.a, tf26.mul(tf26.div(op.inputs.a, op.inputs.b), op.inputs.b)))
};
tf26.registerKernel(kernelMod);
env.kernels.push("mod");
}
if (!env.kernels.includes("floormod")) {
const kernelMod = {
@ -10887,8 +10883,8 @@ function registerCustomOps() {
kernelFunc: (op) => tf26.tidy(() => tf26.floorDiv(op.inputs.a / op.inputs.b) * op.inputs.b + tf26.mod(op.inputs.a, op.inputs.b))
};
tf26.registerKernel(kernelMod);
env.kernels.push("floormod");
}
env.updateBackend();
}
async function check(instance, force = false) {
instance.state = "backend";
@ -10975,7 +10971,7 @@ async function check(instance, force = false) {
await tf26.ready();
instance.performance.initBackend = Math.trunc(now() - timeStamp);
instance.config.backend = tf26.getBackend();
env.updateBackend();
await env.updateBackend();
registerCustomOps();
}
return true;
@ -11674,8 +11670,9 @@ var body2 = (res) => {
gestures.push({ body: i, gesture: "raise right hand" });
const leftShoulder = res[i].keypoints.find((a) => a.part === "leftShoulder");
const rightShoulder = res[i].keypoints.find((a) => a.part === "rightShoulder");
if (leftShoulder && rightShoulder)
if (leftShoulder && rightShoulder && Math.abs(leftShoulder.positionRaw[1] - rightShoulder.positionRaw[1]) > 0.1) {
gestures.push({ body: i, gesture: `leaning ${leftShoulder.position[1] > rightShoulder.position[1] ? "left" : "right"}` });
}
}
return gestures;
};
@ -13013,7 +13010,7 @@ var Human = class {
await this.load();
timeStamp = now();
this.state = "image";
const img = process2(input, this.config);
const img = await process2(input, this.config);
this.process = img;
this.performance.inputProcess = this.env.perfadd ? (this.performance.inputProcess || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Get Image:");
@ -13033,7 +13030,7 @@ var Human = class {
this.performance.totalFrames++;
if (this.config.skipAllowed)
this.performance.cachedFrames++;
this.performance.inputCheck = this.env.perfadd ? (this.performance.inputCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.performance.cacheCheck = this.env.perfadd ? (this.performance.cacheCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Check Changed:");
let faceRes = [];
let bodyRes = [];

2
dist/tfjs.esm.js vendored
View File

@ -69864,7 +69864,7 @@ registerBackend("wasm", async () => {
const { wasm } = await init();
return new BackendWasm(wasm);
}, WASM_PRIORITY);
var externalVersion = "3.11.0-20211105";
var externalVersion = "3.11.0-20211106";
var version8 = {
tfjs: externalVersion,
"tfjs-core": externalVersion,

View File

@ -74,7 +74,7 @@
"canvas": "^2.8.0",
"dayjs": "^1.10.7",
"esbuild": "^0.13.12",
"eslint": "8.1.0",
"eslint": "8.2.0",
"eslint-config-airbnb-base": "^14.2.1",
"eslint-plugin-html": "^6.2.0",
"eslint-plugin-import": "^2.25.2",

View File

@ -122,7 +122,9 @@ export interface SegmentationConfig extends GenericConfig {
export interface FilterConfig {
/** @property are image filters enabled? */
enabled: boolean,
/** @property perform image histogram equalization */
/** @property perform image histogram equalization
* - equalization is performed on input as a whole and detected face before its passed for further analysis
*/
equalization: boolean,
/** resize input width
* - if both width and height are set to 0, there is no resizing
@ -229,6 +231,9 @@ export interface Config {
*/
cacheSensitivity: number;
/** Perform immediate garbage collection on deallocated tensors instead of caching them */
deallocate: boolean;
/** Internal Variable */
skipAllowed: boolean;
@ -264,6 +269,7 @@ const config: Config = {
warmup: 'full',
cacheSensitivity: 0.70,
skipAllowed: false,
deallocate: false,
filter: {
enabled: true,
equalization: false,

View File

@ -13,10 +13,11 @@ import * as blazeface from './blazeface';
import * as util from './facemeshutil';
import * as coords from './facemeshcoords';
import * as iris from './iris';
import { histogramEqualization } from '../image/enhance';
import { env } from '../util/env';
import type { GraphModel, Tensor } from '../tfjs/types';
import type { FaceResult, Point } from '../result';
import type { Config } from '../config';
import { env } from '../util/env';
type BoxCache = { startPoint: Point, endPoint: Point, landmarks: Array<Point>, confidence: number };
let boxCache: Array<BoxCache> = [];
@ -73,6 +74,11 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult
rotationMatrix = util.fixedRotationMatrix;
face.tensor = util.cutBoxFromImageAndResize(box, input, config.face.mesh?.enabled ? [inputSize, inputSize] : [blazeface.size(), blazeface.size()]);
}
if (config?.filter?.equalization) {
const equilized = await histogramEqualization(face.tensor as Tensor);
tf.dispose(face.tensor);
face.tensor = equilized;
}
face.boxScore = Math.round(100 * box.confidence) / 100;
if (!config.face.mesh?.enabled) { // mesh not enabled, return resuts from detector only
face.box = util.getClampedBox(box, input);

View File

@ -41,6 +41,9 @@ export function enhance(input): Tensor {
if (!model?.inputs[0].shape) return tensor; // model has no shape so no point continuing
// do a tight crop of image and resize it to fit the model
const crop = tf.image.resizeBilinear(tensor, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
const norm = tf.mul(crop, 255);
tf.dispose(crop);
return norm;
/*
const box = [[0.05, 0.15, 0.85, 0.85]]; // empyrical values for top, left, bottom, right
const crop = (tensor.shape.length === 3)
@ -78,9 +81,6 @@ export function enhance(input): Tensor {
const darken = crop.sub(crop.min());
const lighten = darken.div(darken.max());
*/
const norm = tf.mul(crop, 255);
tf.dispose(crop);
return norm;
}
export async function predict(image: Tensor, config: Config, idx, count) {

View File

@ -54,7 +54,9 @@ export const body = (res): GestureResult[] => {
// leaning
const leftShoulder = res[i].keypoints.find((a) => (a.part === 'leftShoulder'));
const rightShoulder = res[i].keypoints.find((a) => (a.part === 'rightShoulder'));
if (leftShoulder && rightShoulder) gestures.push({ body: i, gesture: `leaning ${(leftShoulder.position[1] > rightShoulder.position[1]) ? 'left' : 'right'}` });
if (leftShoulder && rightShoulder && Math.abs(leftShoulder.positionRaw[1] - rightShoulder.positionRaw[1]) > 0.1) {
gestures.push({ body: i, gesture: `leaning ${(leftShoulder.position[1] > rightShoulder.position[1]) ? 'left' : 'right'}` });
}
}
return gestures;
};

View File

@ -405,7 +405,7 @@ export class Human {
timeStamp = now();
this.state = 'image';
const img = image.process(input, this.config) as { canvas: HTMLCanvasElement | OffscreenCanvas, tensor: Tensor };
const img = await image.process(input, this.config) as { canvas: HTMLCanvasElement | OffscreenCanvas, tensor: Tensor };
this.process = img;
this.performance.inputProcess = this.env.perfadd ? (this.performance.inputProcess || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze('Get Image:');
@ -423,7 +423,7 @@ export class Human {
if (!this.performance.cachedFrames) this.performance.cachedFrames = 0;
(this.performance.totalFrames as number)++;
if (this.config.skipAllowed) this.performance.cachedFrames++;
this.performance.inputCheck = this.env.perfadd ? (this.performance.inputCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.performance.cacheCheck = this.env.perfadd ? (this.performance.cacheCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze('Check Changed:');
// prepare where to store model results

View File

@ -5,16 +5,20 @@
import * as tf from '../../dist/tfjs.esm.js';
import type { Tensor } from '../exports';
export function histogramEqualization(input: Tensor): Tensor {
const channels = tf.split(input, 3, 2);
export async function histogramEqualization(inputImage: Tensor): Promise<Tensor> {
// const maxValue = 254; // using 255 results in values slightly larger than 1 due to math rounding errors
const squeeze = inputImage.shape.length === 4 ? tf.squeeze(inputImage) : inputImage;
const channels = tf.split(squeeze, 3, 2);
const min: Tensor[] = [tf.min(channels[0]), tf.min(channels[1]), tf.min(channels[2])];
const max: Tensor[] = [tf.max(channels[0]), tf.max(channels[1]), tf.max(channels[2])];
const absMax = await Promise.all(max.map((channel) => channel.data()));
const maxValue = 0.99 * Math.max(absMax[0][0], absMax[1][0], absMax[2][0]);
const sub = [tf.sub(channels[0], min[0]), tf.sub(channels[1], min[1]), tf.sub(channels[2], min[2])];
const range = [tf.sub(max[0], min[0]), tf.sub(max[1], min[1]), tf.sub(max[2], min[2])];
const fact = [tf.div(255, range[0]), tf.div(255, range[1]), tf.div(255, range[2])];
const fact = [tf.div(maxValue, range[0]), tf.div(maxValue, range[1]), tf.div(maxValue, range[2])];
const enh = [tf.mul(sub[0], fact[0]), tf.mul(sub[1], fact[1]), tf.mul(sub[2], fact[2])];
const rgb = tf.stack([enh[0], enh[1], enh[2]], 2);
const reshape = tf.reshape(rgb, [1, input.shape[0], input.shape[1], 3]);
tf.dispose([...channels, ...min, ...max, ...sub, ...range, ...fact, ...enh, rgb]);
return reshape;
const reshape = tf.reshape(rgb, [1, squeeze.shape[0], squeeze.shape[1], 3]);
tf.dispose([...channels, ...min, ...max, ...sub, ...range, ...fact, ...enh, rgb, squeeze]);
return reshape; // output shape is [1, height, width, 3]
}

View File

@ -6,7 +6,7 @@ import * as tf from '../../dist/tfjs.esm.js';
import * as fxImage from './imagefx';
import type { Input, AnyCanvas, Tensor, Config } from '../exports';
import { env } from '../util/env';
import { log, now } from '../util/util';
import { log } from '../util/util';
import * as enhance from './enhance';
const maxSize = 2048;
@ -17,6 +17,13 @@ let tmpCanvas: AnyCanvas | null = null; // use global variable to avoid recreati
// @ts-ignore // imagefx is js module that should be converted to a class
let fx: fxImage.GLImageFilter | null; // instance of imagefx
const last: { inputSum: number, cacheDiff: number, sumMethod: number, inputTensor: undefined | Tensor } = {
inputSum: 0,
cacheDiff: 1,
sumMethod: 0,
inputTensor: undefined,
};
export function canvas(width, height): AnyCanvas {
let c;
if (env.browser) { // browser defines canvas object
@ -48,7 +55,7 @@ export function copy(input: AnyCanvas, output?: AnyCanvas) {
// process input image and return tensor
// input can be tensor, imagedata, htmlimageelement, htmlvideoelement
// input is resized and run through imagefx filter
export function process(input: Input, config: Config, getTensor: boolean = true): { tensor: Tensor | null, canvas: AnyCanvas | null } {
export async function process(input: Input, config: Config, getTensor: boolean = true): Promise<{ tensor: Tensor | null, canvas: AnyCanvas | null }> {
if (!input) {
// throw new Error('input is missing');
if (config.debug) log('input is missing');
@ -108,7 +115,7 @@ export function process(input: Input, config: Config, getTensor: boolean = true)
if ((config.filter.height || 0) > 0) targetHeight = config.filter.height;
else if ((config.filter.width || 0) > 0) targetHeight = originalHeight * ((config.filter.width || 0) / originalWidth);
if (!targetWidth || !targetHeight) throw new Error('input cannot determine dimension');
if (!inCanvas || (inCanvas.width !== targetWidth) || (inCanvas.height !== targetHeight)) inCanvas = canvas(targetWidth, targetHeight);
if (!inCanvas || (inCanvas?.width !== targetWidth) || (inCanvas?.height !== targetHeight)) inCanvas = canvas(targetWidth, targetHeight);
// draw input to our canvas
const inCtx = inCanvas.getContext('2d') as CanvasRenderingContext2D;
@ -118,14 +125,14 @@ export function process(input: Input, config: Config, getTensor: boolean = true)
if (config.filter.flip && typeof inCtx.translate !== 'undefined') {
inCtx.translate(originalWidth, 0);
inCtx.scale(-1, 1);
inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
inCtx.setTransform(1, 0, 0, 1, 0, 0); // resets transforms to defaults
} else {
inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);
inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
}
}
if (!outCanvas || (inCanvas.width !== outCanvas.width) || (inCanvas.height !== outCanvas.height)) outCanvas = canvas(inCanvas.width, inCanvas.height); // init output canvas
if (!outCanvas || (inCanvas.width !== outCanvas.width) || (inCanvas?.height !== outCanvas?.height)) outCanvas = canvas(inCanvas.width, inCanvas.height); // init output canvas
// imagefx transforms using gl from input canvas to output canvas
if (config.filter.enabled && env.webgl.supported) {
@ -192,26 +199,16 @@ export function process(input: Input, config: Config, getTensor: boolean = true)
const rgb = tf.slice3d(pixels, [0, 0, 0], [-1, -1, 3]); // strip alpha channel
tf.dispose(pixels);
pixels = rgb;
/*
const channels = tf.split(pixels, 4, 2); // split rgba to channels
tf.dispose(pixels);
const rgb = tf.stack([channels[0], channels[1], channels[2]], 2); // stack channels back to rgb and ignore alpha
pixels = tf.reshape(rgb, [rgb.shape[0], rgb.shape[1], 3]); // move extra dim from the end of tensor and use it as batch number instead
tf.dispose([rgb, ...channels]);
*/
}
if (!pixels) throw new Error('cannot create tensor from input');
const casted = tf.cast(pixels, 'float32');
const tensor = config.filter.equalization ? enhance.histogramEqualization(casted) : tf.expandDims(casted, 0);
const tensor = config.filter.equalization ? await enhance.histogramEqualization(casted) : tf.expandDims(casted, 0);
tf.dispose([pixels, casted]);
return { tensor, canvas: (config.filter.return ? outCanvas : null) };
}
}
let lastInputSum = 0;
let lastCacheDiff = 1;
let benchmarked = 0;
/*
const checksum = async (input: Tensor): Promise<number> => { // use tf sum or js based sum loop depending on which is faster
const resizeFact = 48;
const reduced: Tensor = tf.image.resizeBilinear(input, [Math.trunc((input.shape[1] || 1) / resizeFact), Math.trunc((input.shape[2] || 1) / resizeFact)]);
@ -227,29 +224,51 @@ const checksum = async (input: Tensor): Promise<number> => { // use tf sum or js
for (let i = 0; i < reducedData.length / 3; i++) sum0 += reducedData[3 * i + 2]; // look only at green value of each pixel
return sum0;
};
if (benchmarked === 0) {
if (last.sumMethod === 0) {
const t0 = now();
await jsSum();
const t1 = now();
await tfSum();
const t2 = now();
benchmarked = t1 - t0 < t2 - t1 ? 1 : 2;
last.sumMethod = t1 - t0 < t2 - t1 ? 1 : 2;
}
const res = benchmarked === 1 ? await jsSum() : await tfSum();
const res = last.sumMethod === 1 ? await jsSum() : await tfSum();
tf.dispose(reduced);
return res;
};
*/
export async function skip(config, input: Tensor) {
if (config.cacheSensitivity === 0) return false;
const sum = await checksum(input);
const diff = 100 * (Math.max(sum, lastInputSum) / Math.min(sum, lastInputSum) - 1);
lastInputSum = sum;
let skipFrame = false;
if (config.cacheSensitivity === 0) return skipFrame;
/*
const checkSum = await checksum(input);
const diff = 100 * (Math.max(checkSum, last.inputSum) / Math.min(checkSum, last.inputSum) - 1);
last.inputSum = checkSum;
// if previous frame was skipped, skip this frame if changed more than cacheSensitivity
// if previous frame was not skipped, then look for cacheSensitivity or difference larger than one in previous frame to avoid resetting cache in subsequent frames unnecessarily
let skipFrame = diff < Math.max(config.cacheSensitivity, lastCacheDiff);
let skipFrame = diff < Math.max(config.cacheSensitivity, last.cacheDiff);
// if difference is above 10x threshold, don't use last value to force reset cache for significant change of scenes or images
lastCacheDiff = diff > 10 * config.cacheSensitivity ? 0 : diff;
skipFrame = skipFrame && (lastCacheDiff > 0); // if no cached diff value then force no skip
last.cacheDiff = diff > 10 * config.cacheSensitivity ? 0 : diff;
skipFrame = skipFrame && (last.cacheDiff > 0); // if no cached diff value then force no skip
*/
if (!last.inputTensor) {
last.inputTensor = tf.clone(input);
} else if (last.inputTensor.shape[1] !== input.shape[1] || last.inputTensor.shape[2] !== input.shape[2]) { // input resolution changed
tf.dispose(last.inputTensor);
last.inputTensor = tf.clone(input);
} else {
const t: Record<string, Tensor> = {};
t.diff = tf.sub(input, last.inputTensor);
t.squared = tf.mul(t.diff, t.diff);
t.sum = tf.sum(t.squared);
const diffSum = await t.sum.data();
const diffRelative = diffSum[0] / (input.shape[1] || 1) / (input.shape[2] || 1) / 255 / 3; // squared difference relative to input resolution and averaged per channel
tf.dispose([last.inputTensor, t.diff, t.squared, t.sum]);
last.inputTensor = tf.clone(input);
skipFrame = diffRelative <= config.cacheSensitivity;
}
return skipFrame;
}

View File

@ -31,7 +31,7 @@ export async function process(input: Input, background: Input | undefined, confi
if (busy) return { data: [], canvas: null, alpha: null };
busy = true;
if (!model) await load(config);
const inputImage = image.process(input, config);
const inputImage = await image.process(input, config);
const width = inputImage.canvas?.width || 0;
const height = inputImage.canvas?.height || 0;
if (!inputImage.tensor) return { data: [], canvas: null, alpha: null };
@ -85,7 +85,7 @@ export async function process(input: Input, background: Input | undefined, confi
let mergedCanvas: HTMLCanvasElement | OffscreenCanvas | null = null;
if (background && compositeCanvas) { // draw background with segmentation as overlay if background is present
mergedCanvas = image.canvas(width, height);
const bgImage = image.process(background, config);
const bgImage = await image.process(background, config);
tf.dispose(bgImage.tensor);
const ctxMerge = mergedCanvas.getContext('2d') as CanvasRenderingContext2D;
ctxMerge.drawImage(bgImage.canvas as HTMLCanvasElement, 0, 0, mergedCanvas.width, mergedCanvas.height);

View File

@ -13,6 +13,7 @@ function registerCustomOps() {
kernelFunc: (op) => tf.tidy(() => tf.sub(op.inputs.a, tf.mul(tf.div(op.inputs.a, op.inputs.b), op.inputs.b))),
};
tf.registerKernel(kernelMod);
env.kernels.push('mod');
}
if (!env.kernels.includes('floormod')) {
const kernelMod = {
@ -21,8 +22,8 @@ function registerCustomOps() {
kernelFunc: (op) => tf.tidy(() => tf.floorDiv(op.inputs.a / op.inputs.b) * op.inputs.b + tf.mod(op.inputs.a, op.inputs.b)),
};
tf.registerKernel(kernelMod);
env.kernels.push('floormod');
}
env.updateBackend();
}
export async function check(instance, force = false) {
@ -123,8 +124,9 @@ export async function check(instance, force = false) {
instance.performance.initBackend = Math.trunc(now() - timeStamp);
instance.config.backend = tf.getBackend();
env.updateBackend(); // update env on backend init
await env.updateBackend(); // update env on backend init
registerCustomOps();
// await env.updateBackend(); // update env on backend init
}
return true;
}

View File

@ -68,13 +68,11 @@ export async function register(instance): Promise<void> {
log('possible browser memory leak using webgl or conflict with multiple backend registrations');
instance.emit('error');
throw new Error('browser webgl error');
/*
log('resetting humangl backend');
env.initial = true;
models.reset(instance);
await tf.removeBackend(config.name);
await register(instance); // re-register
*/
// log('resetting humangl backend');
// env.initial = true;
// models.reset(instance);
// await tf.removeBackend(config.name);
// await register(instance); // re-register
});
config.canvas.addEventListener('webglcontextrestored', (e) => {
log('error: humangl context restored:', e);

View File

@ -1,25 +1,25 @@
2021-11-05 15:16:48 INFO:  @vladmandic/human version 2.5.0
2021-11-05 15:16:48 INFO:  User: vlado Platform: linux Arch: x64 Node: v17.0.1
2021-11-05 15:16:48 INFO:  Application: {"name":"@vladmandic/human","version":"2.5.0"}
2021-11-05 15:16:48 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
2021-11-05 15:16:48 INFO:  Toolchain: {"build":"0.6.3","esbuild":"0.13.12","typescript":"4.4.4","typedoc":"0.22.7","eslint":"8.1.0"}
2021-11-05 15:16:48 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
2021-11-05 15:16:48 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
2021-11-05 15:16:48 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
2021-11-05 15:16:48 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":56,"inputBytes":519015,"outputBytes":439589}
2021-11-05 15:16:48 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
2021-11-05 15:16:48 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":56,"inputBytes":519023,"outputBytes":439593}
2021-11-05 15:16:48 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
2021-11-05 15:16:48 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":56,"inputBytes":519090,"outputBytes":439665}
2021-11-05 15:16:48 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
2021-11-05 15:16:48 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2329,"outputBytes":850}
2021-11-05 15:16:48 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":56,"inputBytes":518590,"outputBytes":441280}
2021-11-05 15:16:49 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2562175,"outputBytes":2497378}
2021-11-05 15:16:49 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":56,"inputBytes":3015118,"outputBytes":1611255}
2021-11-05 15:16:49 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":56,"inputBytes":3015118,"outputBytes":2944528}
2021-11-05 15:17:05 STATE: Typings: {"input":"src/human.ts","output":"types","files":49}
2021-11-05 15:17:11 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":48,"generated":true}
2021-11-05 15:17:11 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5812,"outputBytes":3779}
2021-11-05 15:17:41 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":90,"errors":0,"warnings":0}
2021-11-05 15:17:42 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
2021-11-05 15:17:42 INFO:  Done...
2021-11-06 10:12:23 INFO:  @vladmandic/human version 2.5.0
2021-11-06 10:12:23 INFO:  User: vlado Platform: linux Arch: x64 Node: v17.0.1
2021-11-06 10:12:23 INFO:  Application: {"name":"@vladmandic/human","version":"2.5.0"}
2021-11-06 10:12:23 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
2021-11-06 10:12:23 INFO:  Toolchain: {"build":"0.6.3","esbuild":"0.13.12","typescript":"4.4.4","typedoc":"0.22.7","eslint":"8.2.0"}
2021-11-06 10:12:23 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
2021-11-06 10:12:23 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
2021-11-06 10:12:23 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
2021-11-06 10:12:23 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":56,"inputBytes":520786,"outputBytes":440096}
2021-11-06 10:12:23 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
2021-11-06 10:12:23 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":56,"inputBytes":520794,"outputBytes":440100}
2021-11-06 10:12:23 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
2021-11-06 10:12:23 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":56,"inputBytes":520861,"outputBytes":440172}
2021-11-06 10:12:23 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
2021-11-06 10:12:23 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2326,"outputBytes":912}
2021-11-06 10:12:23 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":56,"inputBytes":520423,"outputBytes":441983}
2021-11-06 10:12:23 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2562175,"outputBytes":2497378}
2021-11-06 10:12:24 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":56,"inputBytes":3016889,"outputBytes":3096312}
2021-11-06 10:12:24 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":56,"inputBytes":3016889,"outputBytes":2945011}
2021-11-06 10:12:43 STATE: Typings: {"input":"src/human.ts","output":"types","files":49}
2021-11-06 10:12:49 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":48,"generated":true}
2021-11-06 10:12:49 STATE: Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5851,"outputBytes":3818}
2021-11-06 10:13:25 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":90,"errors":0,"warnings":0}
2021-11-06 10:13:26 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
2021-11-06 10:13:26 INFO:  Done...

View File

@ -192,7 +192,7 @@ async function test(Human, inputConfig) {
else log('state', 'passed: warmup face result match');
config.warmup = 'body';
res = await testWarmup(human, 'default');
if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 1 || res?.gesture?.length !== 6) log('error', 'failed: warmup body result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 1 || res?.gesture?.length !== 5) log('error', 'failed: warmup body result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
else log('state', 'passed: warmup body result match');
log('state', 'details:', {
face: { boxScore: res.face[0].boxScore, faceScore: res.face[0].faceScore, age: res.face[0].age, gender: res.face[0].gender, genderScore: res.face[0].genderScore },
@ -278,7 +278,7 @@ async function test(Human, inputConfig) {
config.body = { minConfidence: 0.0001 };
config.hand = { minConfidence: 0.0001 };
res = await testDetect(human, 'samples/in/ai-body.jpg', 'default');
if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 2 || res?.gesture?.length !== 8) log('error', 'failed: sensitive result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 2 || res?.gesture?.length !== 7) log('error', 'failed: sensitive result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
else log('state', 'passed: sensitive result match');
// test sensitive details face

File diff suppressed because it is too large Load Diff

View File

@ -5,7 +5,7 @@
// export all from build bundle
export * from '@tensorflow/tfjs/dist/index.js';
// export * from '@tensorflow/tfjs-backend-webgl/dist/index.js';
export * from '@tensorflow/tfjs-backend-webgl/dist/index.js';
// export * from '@tensorflow/tfjs-backend-wasm/dist/index.js';
// add webgpu to bundle, experimental

File diff suppressed because one or more lines are too long

View File

@ -99,9 +99,9 @@ Possible events:</p>
</div></li></ul></section><section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class"><a id="enhance" class="tsd-anchor"></a><h3>enhance</h3><ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class"><li class="tsd-signature tsd-kind-icon">enhance<span class="tsd-signature-symbol">(</span>input<span class="tsd-signature-symbol">: </span><a href="Tensor.html" class="tsd-signature-type" data-tsd-kind="Class">Tensor</a><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">&gt;</span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="Tensor.html" class="tsd-signature-type" data-tsd-kind="Class">Tensor</a><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">&gt;</span></li></ul><ul class="tsd-descriptions"><li class="tsd-description"><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/human.ts#L266">src/human.ts:266</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>Enhance method performs additional enhancements to face image previously detected for further processing</p>
</div></div><h4 class="tsd-parameters-title">Parameters</h4><ul class="tsd-parameters"><li><h5>input: <a href="Tensor.html" class="tsd-signature-type" data-tsd-kind="Class">Tensor</a><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">&gt;</span></h5></li></ul><h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="Tensor.html" class="tsd-signature-type" data-tsd-kind="Class">Tensor</a><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">&gt;</span></h4><div><p>Tensor</p>
</div></li></ul></section><section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class"><a id="image" class="tsd-anchor"></a><h3>image</h3><ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class"><li class="tsd-signature tsd-kind-icon">image<span class="tsd-signature-symbol">(</span>input<span class="tsd-signature-symbol">: </span><a href="../index.html#Input" class="tsd-signature-type" data-tsd-kind="Type alias">Input</a>, getTensor<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>canvas<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="../index.html#AnyCanvas" class="tsd-signature-type" data-tsd-kind="Type alias">AnyCanvas</a><span class="tsd-signature-symbol">; </span>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="Tensor.html" class="tsd-signature-type" data-tsd-kind="Class">Tensor</a><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">&gt;</span><span class="tsd-signature-symbol"> }</span></li></ul><ul class="tsd-descriptions"><li class="tsd-description"><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/human.ts#L239">src/human.ts:239</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></li></ul></section><section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class"><a id="image" class="tsd-anchor"></a><h3>image</h3><ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class"><li class="tsd-signature tsd-kind-icon">image<span class="tsd-signature-symbol">(</span>input<span class="tsd-signature-symbol">: </span><a href="../index.html#Input" class="tsd-signature-type" data-tsd-kind="Type alias">Input</a>, getTensor<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-symbol">{ </span>canvas<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="../index.html#AnyCanvas" class="tsd-signature-type" data-tsd-kind="Type alias">AnyCanvas</a><span class="tsd-signature-symbol">; </span>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="Tensor.html" class="tsd-signature-type" data-tsd-kind="Class">Tensor</a><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">&gt;</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">&gt;</span></li></ul><ul class="tsd-descriptions"><li class="tsd-description"><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/human.ts#L239">src/human.ts:239</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>Process input as return canvas and tensor</p>
</div></div><h4 class="tsd-parameters-title">Parameters</h4><ul class="tsd-parameters"><li><h5>input: <a href="../index.html#Input" class="tsd-signature-type" data-tsd-kind="Type alias">Input</a></h5></li><li><h5>getTensor: <span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol"> = true</span></h5></li></ul><h4 class="tsd-returns-title">Returns <span class="tsd-signature-symbol">{ </span>canvas<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="../index.html#AnyCanvas" class="tsd-signature-type" data-tsd-kind="Type alias">AnyCanvas</a><span class="tsd-signature-symbol">; </span>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="Tensor.html" class="tsd-signature-type" data-tsd-kind="Class">Tensor</a><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">&gt;</span><span class="tsd-signature-symbol"> }</span></h4><div></div><ul class="tsd-parameters"><li class="tsd-parameter"><h5>canvas<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="../index.html#AnyCanvas" class="tsd-signature-type" data-tsd-kind="Type alias">AnyCanvas</a></h5></li><li class="tsd-parameter"><h5>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="Tensor.html" class="tsd-signature-type" data-tsd-kind="Class">Tensor</a><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">&gt;</span></h5></li></ul></li></ul></section><section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class"><a id="init" class="tsd-anchor"></a><h3>init</h3><ul class="tsd-signatures 
tsd-kind-method tsd-parent-kind-class"><li class="tsd-signature tsd-kind-icon">init<span class="tsd-signature-symbol">(</span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">void</span><span class="tsd-signature-symbol">&gt;</span></li></ul><ul class="tsd-descriptions"><li class="tsd-description"><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/human.ts#L277">src/human.ts:277</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div><h4 class="tsd-parameters-title">Parameters</h4><ul class="tsd-parameters"><li><h5>input: <a href="../index.html#Input" class="tsd-signature-type" data-tsd-kind="Type alias">Input</a></h5></li><li><h5>getTensor: <span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol"> = true</span></h5></li></ul><h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-symbol">{ </span>canvas<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="../index.html#AnyCanvas" class="tsd-signature-type" data-tsd-kind="Type alias">AnyCanvas</a><span class="tsd-signature-symbol">; </span>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="Tensor.html" class="tsd-signature-type" data-tsd-kind="Class">Tensor</a><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">&gt;</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">&gt;</span></h4><div></div></li></ul></section><section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class"><a id="init" class="tsd-anchor"></a><h3>init</h3><ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class"><li class="tsd-signature tsd-kind-icon">init<span class="tsd-signature-symbol">(</span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">void</span><span class="tsd-signature-symbol">&gt;</span></li></ul><ul class="tsd-descriptions"><li class="tsd-description"><aside class="tsd-sources"><ul><li>Defined in <a 
href="https://github.com/vladmandic/human/blob/main/src/human.ts#L277">src/human.ts:277</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>Explicit backend initialization</p>
<ul>
<li>Normally done implicitly during initial load phase</li>

View File

@ -75,7 +75,7 @@
<li>Can be TFJS that is bundled with <code>Human</code> or a manually imported TFJS library</li>
</ul>
</div><dl class="tsd-comment-tags"><dt>external</dt><dd><p><a href="https://js.tensorflow.org/api/latest/">API</a></p>
</dd></dl></div></section></section><section class="tsd-panel-group tsd-member-group "><h2>Variables</h2><section class="tsd-panel tsd-member tsd-kind-variable"><a id="defaults" class="tsd-anchor"></a><h3>defaults</h3><div class="tsd-signature tsd-kind-icon">defaults<span class="tsd-signature-symbol">:</span> <a href="interfaces/Config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> = ...</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L258">src/config.ts:258</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</dd></dl></div></section></section><section class="tsd-panel-group tsd-member-group "><h2>Variables</h2><section class="tsd-panel tsd-member tsd-kind-variable"><a id="defaults" class="tsd-anchor"></a><h3>defaults</h3><div class="tsd-signature tsd-kind-icon">defaults<span class="tsd-signature-symbol">:</span> <a href="interfaces/Config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> = ...</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L263">src/config.ts:263</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<ul>
<li><a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L253">See all default Config values...</a></li>
</ul>

View File

@ -1,10 +1,10 @@
<!DOCTYPE html><html class="default no-js"><head><meta charSet="utf-8"/><meta http-equiv="x-ua-compatible" content="IE=edge"/><title>Config | @vladmandic/human - v2.5.0</title><meta name="description" content="Documentation for @vladmandic/human - v2.5.0"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="stylesheet" href="../assets/style.css"/><link rel="stylesheet" href="../assets/highlight.css"/><script async src="../assets/search.js" id="search-script"></script></head><body><script>document.body.classList.add(localStorage.getItem("tsd-theme") || "os")</script><header><div class="tsd-page-toolbar"><div class="container"><div class="table-wrap"><div class="table-cell" id="tsd-search" data-base=".."><div class="field"><label for="tsd-search-field" class="tsd-widget search no-caption">Search</label><input type="text" id="tsd-search-field"/></div><ul class="results"><li class="state loading">Preparing search index...</li><li class="state failure">The search index is not available</li></ul><a href="../index.html" class="title">@vladmandic/human - v2.5.0</a></div><div class="table-cell" id="tsd-widgets"><div id="tsd-filter"><a href="#" class="tsd-widget options no-caption" data-toggle="options">Options</a><div class="tsd-filter-group"><div class="tsd-select" id="tsd-filter-visibility"><span class="tsd-select-label">All</span><ul class="tsd-select-list"><li data-value="public">Public</li><li data-value="protected">Public/Protected</li><li data-value="private" class="selected">All</li></ul></div> <input type="checkbox" id="tsd-filter-inherited" checked/><label class="tsd-widget" for="tsd-filter-inherited">Inherited</label></div></div><a href="#" class="tsd-widget menu no-caption" data-toggle="menu">Menu</a></div></div></div></div><div class="tsd-page-title"><div class="container"><ul class="tsd-breadcrumb"><li><a href="../index.html">@vladmandic/human - v2.5.0</a></li><li><a href="Config.html">Config</a></li></ul><h1>Interface 
Config</h1></div></div></header><div class="container container-main"><div class="row"><div class="col-8 col-content"><section class="tsd-panel tsd-comment"><div class="tsd-comment tsd-typography"><div class="lead">
<p>Configuration interface definition for <strong>Human</strong> library</p>
</div><div><p>Contains all configurable parameters</p>
</div></div></section><section class="tsd-panel tsd-hierarchy"><h3>Hierarchy</h3><ul class="tsd-hierarchy"><li><span class="target">Config</span></li></ul></section><section class="tsd-panel-group tsd-index-group"><h2>Index</h2><section class="tsd-panel tsd-index-panel"><div class="tsd-index-content"><section class="tsd-index-section "><h3>Properties</h3><ul class="tsd-index-list"><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#async" class="tsd-kind-icon">async</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#backend" class="tsd-kind-icon">backend</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#body" class="tsd-kind-icon">body</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#cacheSensitivity" class="tsd-kind-icon">cache<wbr/>Sensitivity</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#debug" class="tsd-kind-icon">debug</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#face" class="tsd-kind-icon">face</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#filter" class="tsd-kind-icon">filter</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#gesture" class="tsd-kind-icon">gesture</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#hand" class="tsd-kind-icon">hand</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#modelBasePath" class="tsd-kind-icon">model<wbr/>Base<wbr/>Path</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#object" class="tsd-kind-icon">object</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#segmentation" class="tsd-kind-icon">segmentation</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#skipAllowed" 
class="tsd-kind-icon">skip<wbr/>Allowed</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#warmup" class="tsd-kind-icon">warmup</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#wasmPath" class="tsd-kind-icon">wasm<wbr/>Path</a></li></ul></section></div></section></section><section class="tsd-panel-group tsd-member-group "><h2>Properties</h2><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="async" class="tsd-anchor"></a><h3>async</h3><div class="tsd-signature tsd-kind-icon">async<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L209">src/config.ts:209</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-hierarchy"><h3>Hierarchy</h3><ul class="tsd-hierarchy"><li><span class="target">Config</span></li></ul></section><section class="tsd-panel-group tsd-index-group"><h2>Index</h2><section class="tsd-panel tsd-index-panel"><div class="tsd-index-content"><section class="tsd-index-section "><h3>Properties</h3><ul class="tsd-index-list"><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#async" class="tsd-kind-icon">async</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#backend" class="tsd-kind-icon">backend</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#body" class="tsd-kind-icon">body</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#cacheSensitivity" class="tsd-kind-icon">cache<wbr/>Sensitivity</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#deallocate" class="tsd-kind-icon">deallocate</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#debug" class="tsd-kind-icon">debug</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#face" class="tsd-kind-icon">face</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#filter" class="tsd-kind-icon">filter</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#gesture" class="tsd-kind-icon">gesture</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#hand" class="tsd-kind-icon">hand</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#modelBasePath" class="tsd-kind-icon">model<wbr/>Base<wbr/>Path</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#object" class="tsd-kind-icon">object</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#segmentation" 
class="tsd-kind-icon">segmentation</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#skipAllowed" class="tsd-kind-icon">skip<wbr/>Allowed</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#warmup" class="tsd-kind-icon">warmup</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#wasmPath" class="tsd-kind-icon">wasm<wbr/>Path</a></li></ul></section></div></section></section><section class="tsd-panel-group tsd-member-group "><h2>Properties</h2><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="async" class="tsd-anchor"></a><h3>async</h3><div class="tsd-signature tsd-kind-icon">async<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L211">src/config.ts:211</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>Perform model loading and inference concurrently or sequentially
default: <code>true</code></p>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="backend" class="tsd-anchor"></a><h3>backend</h3><div class="tsd-signature tsd-kind-icon">backend<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">&quot;&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;cpu&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;wasm&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;webgl&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;humangl&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;tensorflow&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;webgpu&quot;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L194">src/config.ts:194</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="backend" class="tsd-anchor"></a><h3>backend</h3><div class="tsd-signature tsd-kind-icon">backend<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">&quot;&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;cpu&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;wasm&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;webgl&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;humangl&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;tensorflow&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;webgpu&quot;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L196">src/config.ts:196</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>Backend used for TFJS operations
valid build-in backends are:</p>
<ul>
@ -12,46 +12,48 @@ valid build-in backends are:</p>
<li>NodeJS: <code>cpu</code>, <code>wasm</code>, <code>tensorflow</code>
default: <code>humangl</code> for browser and <code>tensorflow</code> for nodejs</li>
</ul>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="body" class="tsd-anchor"></a><h3>body</h3><div class="tsd-signature tsd-kind-icon">body<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol">&lt;</span><a href="BodyConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">BodyConfig</a><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L245">src/config.ts:245</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="body" class="tsd-anchor"></a><h3>body</h3><div class="tsd-signature tsd-kind-icon">body<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol">&lt;</span><a href="BodyConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">BodyConfig</a><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L250">src/config.ts:250</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p><a href="BodyConfig.html">BodyConfig</a></p>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="cacheSensitivity" class="tsd-anchor"></a><h3>cache<wbr/>Sensitivity</h3><div class="tsd-signature tsd-kind-icon">cache<wbr/>Sensitivity<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L230">src/config.ts:230</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="cacheSensitivity" class="tsd-anchor"></a><h3>cache<wbr/>Sensitivity</h3><div class="tsd-signature tsd-kind-icon">cache<wbr/>Sensitivity<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L232">src/config.ts:232</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>Cache sensitivity</p>
<ul>
<li>values 0..1 where 0.01 means reset cache if input changed more than 1%</li>
<li>set to 0 to disable caching
default: 0.7</li>
</ul>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="debug" class="tsd-anchor"></a><h3>debug</h3><div class="tsd-signature tsd-kind-icon">debug<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L204">src/config.ts:204</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="deallocate" class="tsd-anchor"></a><h3>deallocate</h3><div class="tsd-signature tsd-kind-icon">deallocate<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L235">src/config.ts:235</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>Perform immediate garbage collection on deallocated tensors instead of caching them</p>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="debug" class="tsd-anchor"></a><h3>debug</h3><div class="tsd-signature tsd-kind-icon">debug<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L206">src/config.ts:206</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>Print debug statements to console
default: <code>true</code></p>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="face" class="tsd-anchor"></a><h3>face</h3><div class="tsd-signature tsd-kind-icon">face<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol">&lt;</span><a href="FaceConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">FaceConfig</a><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L242">src/config.ts:242</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="face" class="tsd-anchor"></a><h3>face</h3><div class="tsd-signature tsd-kind-icon">face<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol">&lt;</span><a href="FaceConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">FaceConfig</a><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L247">src/config.ts:247</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p><a href="FaceConfig.html">FaceConfig</a></p>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="filter" class="tsd-anchor"></a><h3>filter</h3><div class="tsd-signature tsd-kind-icon">filter<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol">&lt;</span><a href="FilterConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">FilterConfig</a><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L236">src/config.ts:236</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="filter" class="tsd-anchor"></a><h3>filter</h3><div class="tsd-signature tsd-kind-icon">filter<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol">&lt;</span><a href="FilterConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">FilterConfig</a><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L241">src/config.ts:241</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p><a href="FilterConfig.html">FilterConfig</a></p>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="gesture" class="tsd-anchor"></a><h3>gesture</h3><div class="tsd-signature tsd-kind-icon">gesture<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol">&lt;</span><a href="GestureConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">GestureConfig</a><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L239">src/config.ts:239</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="gesture" class="tsd-anchor"></a><h3>gesture</h3><div class="tsd-signature tsd-kind-icon">gesture<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol">&lt;</span><a href="GestureConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">GestureConfig</a><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L244">src/config.ts:244</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p><a href="GestureConfig.html">GestureConfig</a></p>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="hand" class="tsd-anchor"></a><h3>hand</h3><div class="tsd-signature tsd-kind-icon">hand<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol">&lt;</span><a href="HandConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">HandConfig</a><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L248">src/config.ts:248</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="hand" class="tsd-anchor"></a><h3>hand</h3><div class="tsd-signature tsd-kind-icon">hand<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol">&lt;</span><a href="HandConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">HandConfig</a><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L253">src/config.ts:253</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p><a href="HandConfig.html">HandConfig</a></p>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="modelBasePath" class="tsd-anchor"></a><h3>model<wbr/>Base<wbr/>Path</h3><div class="tsd-signature tsd-kind-icon">model<wbr/>Base<wbr/>Path<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">string</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L223">src/config.ts:223</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="modelBasePath" class="tsd-anchor"></a><h3>model<wbr/>Base<wbr/>Path</h3><div class="tsd-signature tsd-kind-icon">model<wbr/>Base<wbr/>Path<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">string</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L225">src/config.ts:225</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>Base model path (typically starting with file://, http:// or https://) for all models</p>
<ul>
<li>individual modelPath values are relative to this path
default: <code>../models/</code> for browsers and <code>file://models/</code> for nodejs</li>
</ul>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="object" class="tsd-anchor"></a><h3>object</h3><div class="tsd-signature tsd-kind-icon">object<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol">&lt;</span><a href="ObjectConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">ObjectConfig</a><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L251">src/config.ts:251</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="object" class="tsd-anchor"></a><h3>object</h3><div class="tsd-signature tsd-kind-icon">object<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol">&lt;</span><a href="ObjectConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">ObjectConfig</a><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L256">src/config.ts:256</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p><a href="ObjectConfig.html">ObjectConfig</a></p>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="segmentation" class="tsd-anchor"></a><h3>segmentation</h3><div class="tsd-signature tsd-kind-icon">segmentation<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol">&lt;</span><a href="SegmentationConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">SegmentationConfig</a><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L254">src/config.ts:254</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="segmentation" class="tsd-anchor"></a><h3>segmentation</h3><div class="tsd-signature tsd-kind-icon">segmentation<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol">&lt;</span><a href="SegmentationConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">SegmentationConfig</a><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L259">src/config.ts:259</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p><a href="SegmentationConfig.html">SegmentationConfig</a></p>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="skipAllowed" class="tsd-anchor"></a><h3>skip<wbr/>Allowed</h3><div class="tsd-signature tsd-kind-icon">skip<wbr/>Allowed<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L233">src/config.ts:233</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="skipAllowed" class="tsd-anchor"></a><h3>skip<wbr/>Allowed</h3><div class="tsd-signature tsd-kind-icon">skip<wbr/>Allowed<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L238">src/config.ts:238</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>Internal Variable</p>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="warmup" class="tsd-anchor"></a><h3>warmup</h3><div class="tsd-signature tsd-kind-icon">warmup<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">&quot;face&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;body&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;none&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;full&quot;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L216">src/config.ts:216</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="warmup" class="tsd-anchor"></a><h3>warmup</h3><div class="tsd-signature tsd-kind-icon">warmup<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">&quot;face&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;body&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;none&quot;</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">&quot;full&quot;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L218">src/config.ts:218</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>What to use for <code>human.warmup()</code></p>
<ul>
<li>warmup pre-initializes all models for faster inference but can take significant time on startup</li>
<li>used by <code>webgl</code>, <code>humangl</code> and <code>webgpu</code> backends
default: <code>full</code></li>
</ul>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="wasmPath" class="tsd-anchor"></a><h3>wasm<wbr/>Path</h3><div class="tsd-signature tsd-kind-icon">wasm<wbr/>Path<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">string</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L199">src/config.ts:199</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="wasmPath" class="tsd-anchor"></a><h3>wasm<wbr/>Path</h3><div class="tsd-signature tsd-kind-icon">wasm<wbr/>Path<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">string</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L201">src/config.ts:201</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>Path to *.wasm files if backend is set to <code>wasm</code>
default: auto-detects to link to CDN <code>jsdelivr</code> when running in browser</p>
</div></div></section></section></div><div class="col-4 col-menu menu-sticky-wrap menu-highlight"><nav class="tsd-navigation primary"><ul><li class=""><a href="../index.html">Exports</a></li><li class=" tsd-kind-namespace"><a href="../modules/Tensor.html">Tensor</a></li></ul></nav><nav class="tsd-navigation secondary menu-sticky"><ul><li class="current tsd-kind-interface"><a href="Config.html" class="tsd-kind-icon">Config</a><ul><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#async" class="tsd-kind-icon">async</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#backend" class="tsd-kind-icon">backend</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#body" class="tsd-kind-icon">body</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#cacheSensitivity" class="tsd-kind-icon">cache<wbr/>Sensitivity</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#debug" class="tsd-kind-icon">debug</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#face" class="tsd-kind-icon">face</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#filter" class="tsd-kind-icon">filter</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#gesture" class="tsd-kind-icon">gesture</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#hand" class="tsd-kind-icon">hand</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#modelBasePath" class="tsd-kind-icon">model<wbr/>Base<wbr/>Path</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#object" class="tsd-kind-icon">object</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#segmentation" class="tsd-kind-icon">segmentation</a></li><li class="tsd-kind-property 
tsd-parent-kind-interface"><a href="Config.html#skipAllowed" class="tsd-kind-icon">skip<wbr/>Allowed</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#warmup" class="tsd-kind-icon">warmup</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#wasmPath" class="tsd-kind-icon">wasm<wbr/>Path</a></li></ul></li></ul></nav></div></div></div><footer class=""><div class="container"><h2>Legend</h2><div class="tsd-legend-group"><ul class="tsd-legend"><li class="tsd-kind-constructor tsd-parent-kind-class"><span class="tsd-kind-icon">Constructor</span></li><li class="tsd-kind-property tsd-parent-kind-class"><span class="tsd-kind-icon">Property</span></li><li class="tsd-kind-method tsd-parent-kind-class"><span class="tsd-kind-icon">Method</span></li></ul><ul class="tsd-legend"><li class="tsd-kind-property tsd-parent-kind-interface"><span class="tsd-kind-icon">Property</span></li></ul></div><h2>Settings</h2><p>Theme <select id="theme"><option value="os">OS</option><option value="light">Light</option><option value="dark">Dark</option></select></p></div></footer><div class="overlay"></div><script src="../assets/main.js"></script></body></html>
</div></div></section></section></div><div class="col-4 col-menu menu-sticky-wrap menu-highlight"><nav class="tsd-navigation primary"><ul><li class=""><a href="../index.html">Exports</a></li><li class=" tsd-kind-namespace"><a href="../modules/Tensor.html">Tensor</a></li></ul></nav><nav class="tsd-navigation secondary menu-sticky"><ul><li class="current tsd-kind-interface"><a href="Config.html" class="tsd-kind-icon">Config</a><ul><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#async" class="tsd-kind-icon">async</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#backend" class="tsd-kind-icon">backend</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#body" class="tsd-kind-icon">body</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#cacheSensitivity" class="tsd-kind-icon">cache<wbr/>Sensitivity</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#deallocate" class="tsd-kind-icon">deallocate</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#debug" class="tsd-kind-icon">debug</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#face" class="tsd-kind-icon">face</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#filter" class="tsd-kind-icon">filter</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#gesture" class="tsd-kind-icon">gesture</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#hand" class="tsd-kind-icon">hand</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#modelBasePath" class="tsd-kind-icon">model<wbr/>Base<wbr/>Path</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#object" class="tsd-kind-icon">object</a></li><li class="tsd-kind-property 
tsd-parent-kind-interface"><a href="Config.html#segmentation" class="tsd-kind-icon">segmentation</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#skipAllowed" class="tsd-kind-icon">skip<wbr/>Allowed</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#warmup" class="tsd-kind-icon">warmup</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#wasmPath" class="tsd-kind-icon">wasm<wbr/>Path</a></li></ul></li></ul></nav></div></div></div><footer class=""><div class="container"><h2>Legend</h2><div class="tsd-legend-group"><ul class="tsd-legend"><li class="tsd-kind-constructor tsd-parent-kind-class"><span class="tsd-kind-icon">Constructor</span></li><li class="tsd-kind-property tsd-parent-kind-class"><span class="tsd-kind-icon">Property</span></li><li class="tsd-kind-method tsd-parent-kind-class"><span class="tsd-kind-icon">Method</span></li></ul><ul class="tsd-legend"><li class="tsd-kind-property tsd-parent-kind-interface"><span class="tsd-kind-icon">Property</span></li></ul></div><h2>Settings</h2><p>Theme <select id="theme"><option value="os">OS</option><option value="light">Light</option><option value="dark">Dark</option></select></p></div></footer><div class="overlay"></div><script src="../assets/main.js"></script></body></html>

View File

@ -4,31 +4,34 @@
<li>available only in Browser environments</li>
<li>image filters run with near-zero latency as they are executed on the GPU using WebGL</li>
</ul>
</div></div></section><section class="tsd-panel tsd-hierarchy"><h3>Hierarchy</h3><ul class="tsd-hierarchy"><li><span class="target">FilterConfig</span></li></ul></section><section class="tsd-panel-group tsd-index-group"><h2>Index</h2><section class="tsd-panel tsd-index-panel"><div class="tsd-index-content"><section class="tsd-index-section "><h3>Properties</h3><ul class="tsd-index-list"><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#blur" class="tsd-kind-icon">blur</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#brightness" class="tsd-kind-icon">brightness</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#contrast" class="tsd-kind-icon">contrast</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#enabled" class="tsd-kind-icon">enabled</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#equalization" class="tsd-kind-icon">equalization</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#flip" class="tsd-kind-icon">flip</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#height" class="tsd-kind-icon">height</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#hue" class="tsd-kind-icon">hue</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#kodachrome" class="tsd-kind-icon">kodachrome</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#negative" class="tsd-kind-icon">negative</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#pixelate" class="tsd-kind-icon">pixelate</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#polaroid" class="tsd-kind-icon">polaroid</a></li><li class="tsd-kind-property 
tsd-parent-kind-interface"><a href="FilterConfig.html#return" class="tsd-kind-icon">return</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#saturation" class="tsd-kind-icon">saturation</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#sepia" class="tsd-kind-icon">sepia</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#sharpness" class="tsd-kind-icon">sharpness</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#technicolor" class="tsd-kind-icon">technicolor</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#vintage" class="tsd-kind-icon">vintage</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#width" class="tsd-kind-icon">width</a></li></ul></section></div></section></section><section class="tsd-panel-group tsd-member-group "><h2>Properties</h2><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="blur" class="tsd-anchor"></a><h3>blur</h3><div class="tsd-signature tsd-kind-icon">blur<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L152">src/config.ts:152</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>range: 0 (no blur) to N (blur radius in pixels)</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="brightness" class="tsd-anchor"></a><h3>brightness</h3><div class="tsd-signature tsd-kind-icon">brightness<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L146">src/config.ts:146</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>range: -1 (darken) to 1 (lighten)</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="contrast" class="tsd-anchor"></a><h3>contrast</h3><div class="tsd-signature tsd-kind-icon">contrast<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L148">src/config.ts:148</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>range: -1 (reduce contrast) to 1 (increase contrast)</p>
</div></div></section><section class="tsd-panel tsd-hierarchy"><h3>Hierarchy</h3><ul class="tsd-hierarchy"><li><span class="target">FilterConfig</span></li></ul></section><section class="tsd-panel-group tsd-index-group"><h2>Index</h2><section class="tsd-panel tsd-index-panel"><div class="tsd-index-content"><section class="tsd-index-section "><h3>Properties</h3><ul class="tsd-index-list"><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#blur" class="tsd-kind-icon">blur</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#brightness" class="tsd-kind-icon">brightness</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#contrast" class="tsd-kind-icon">contrast</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#enabled" class="tsd-kind-icon">enabled</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#equalization" class="tsd-kind-icon">equalization</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#flip" class="tsd-kind-icon">flip</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#height" class="tsd-kind-icon">height</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#hue" class="tsd-kind-icon">hue</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#kodachrome" class="tsd-kind-icon">kodachrome</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#negative" class="tsd-kind-icon">negative</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#pixelate" class="tsd-kind-icon">pixelate</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#polaroid" class="tsd-kind-icon">polaroid</a></li><li class="tsd-kind-property 
tsd-parent-kind-interface"><a href="FilterConfig.html#return" class="tsd-kind-icon">return</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#saturation" class="tsd-kind-icon">saturation</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#sepia" class="tsd-kind-icon">sepia</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#sharpness" class="tsd-kind-icon">sharpness</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#technicolor" class="tsd-kind-icon">technicolor</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#vintage" class="tsd-kind-icon">vintage</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="FilterConfig.html#width" class="tsd-kind-icon">width</a></li></ul></section></div></section></section><section class="tsd-panel-group tsd-member-group "><h2>Properties</h2><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="blur" class="tsd-anchor"></a><h3>blur</h3><div class="tsd-signature tsd-kind-icon">blur<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L154">src/config.ts:154</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>range: 0 (no blur) to N (blur radius in pixels)</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="brightness" class="tsd-anchor"></a><h3>brightness</h3><div class="tsd-signature tsd-kind-icon">brightness<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L148">src/config.ts:148</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>range: -1 (darken) to 1 (lighten)</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="contrast" class="tsd-anchor"></a><h3>contrast</h3><div class="tsd-signature tsd-kind-icon">contrast<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L150">src/config.ts:150</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>range: -1 (reduce contrast) to 1 (increase contrast)</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="enabled" class="tsd-anchor"></a><h3>enabled</h3><div class="tsd-signature tsd-kind-icon">enabled<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L124">src/config.ts:124</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>are image filters enabled?</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="equalization" class="tsd-anchor"></a><h3>equalization</h3><div class="tsd-signature tsd-kind-icon">equalization<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L126">src/config.ts:126</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>perform image histogram equalization</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="flip" class="tsd-anchor"></a><h3>flip</h3><div class="tsd-signature tsd-kind-icon">flip<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L144">src/config.ts:144</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>flip input as mirror image</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="height" class="tsd-anchor"></a><h3>height</h3><div class="tsd-signature tsd-kind-icon">height<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L140">src/config.ts:140</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="equalization" class="tsd-anchor"></a><h3>equalization</h3><div class="tsd-signature tsd-kind-icon">equalization<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L128">src/config.ts:128</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>perform image histogram equalization</p>
<ul>
<li>equalization is performed on input as a whole and detected face before its passed for further analysis</li>
</ul>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="flip" class="tsd-anchor"></a><h3>flip</h3><div class="tsd-signature tsd-kind-icon">flip<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L146">src/config.ts:146</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>flip input as mirror image</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="height" class="tsd-anchor"></a><h3>height</h3><div class="tsd-signature tsd-kind-icon">height<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L142">src/config.ts:142</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>resize input height</p>
<ul>
<li>if both width and height are set to 0, there is no resizing</li>
<li>if just one is set, second one is scaled automatically</li>
<li>if both are set, values are used as-is</li>
</ul>
</div><dl class="tsd-comment-tags"><dt>property</dt><dd></dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="hue" class="tsd-anchor"></a><h3>hue</h3><div class="tsd-signature tsd-kind-icon">hue<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L156">src/config.ts:156</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>range: 0 (no change) to 360 (hue rotation in degrees)</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="kodachrome" class="tsd-anchor"></a><h3>kodachrome</h3><div class="tsd-signature tsd-kind-icon">kodachrome<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L164">src/config.ts:164</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>image kodachrome colors</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="negative" class="tsd-anchor"></a><h3>negative</h3><div class="tsd-signature tsd-kind-icon">negative<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L158">src/config.ts:158</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>image negative</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="pixelate" class="tsd-anchor"></a><h3>pixelate</h3><div class="tsd-signature tsd-kind-icon">pixelate<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L170">src/config.ts:170</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>range: 0 (no pixelate) to N (number of pixels to pixelate)</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="polaroid" class="tsd-anchor"></a><h3>polaroid</h3><div class="tsd-signature tsd-kind-icon">polaroid<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L168">src/config.ts:168</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>image polaroid camera effect</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="return" class="tsd-anchor"></a><h3>return</h3><div class="tsd-signature tsd-kind-icon">return<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L142">src/config.ts:142</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>return processed canvas imagedata in result</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="saturation" class="tsd-anchor"></a><h3>saturation</h3><div class="tsd-signature tsd-kind-icon">saturation<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L154">src/config.ts:154</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>range: -1 (reduce saturation) to 1 (increase saturation)</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="sepia" class="tsd-anchor"></a><h3>sepia</h3><div class="tsd-signature tsd-kind-icon">sepia<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L160">src/config.ts:160</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>image sepia colors</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="sharpness" class="tsd-anchor"></a><h3>sharpness</h3><div class="tsd-signature tsd-kind-icon">sharpness<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L150">src/config.ts:150</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>range: 0 (no sharpening) to 1 (maximum sharpening)</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="technicolor" class="tsd-anchor"></a><h3>technicolor</h3><div class="tsd-signature tsd-kind-icon">technicolor<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L166">src/config.ts:166</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>image technicolor colors</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="vintage" class="tsd-anchor"></a><h3>vintage</h3><div class="tsd-signature tsd-kind-icon">vintage<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L162">src/config.ts:162</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>image vintage colors</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="width" class="tsd-anchor"></a><h3>width</h3><div class="tsd-signature tsd-kind-icon">width<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L133">src/config.ts:133</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
</div><dl class="tsd-comment-tags"><dt>property</dt><dd></dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="hue" class="tsd-anchor"></a><h3>hue</h3><div class="tsd-signature tsd-kind-icon">hue<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L158">src/config.ts:158</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>range: 0 (no change) to 360 (hue rotation in degrees)</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="kodachrome" class="tsd-anchor"></a><h3>kodachrome</h3><div class="tsd-signature tsd-kind-icon">kodachrome<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L166">src/config.ts:166</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>image kodachrome colors</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="negative" class="tsd-anchor"></a><h3>negative</h3><div class="tsd-signature tsd-kind-icon">negative<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L160">src/config.ts:160</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>image negative</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="pixelate" class="tsd-anchor"></a><h3>pixelate</h3><div class="tsd-signature tsd-kind-icon">pixelate<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L172">src/config.ts:172</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>range: 0 (no pixelate) to N (number of pixels to pixelate)</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="polaroid" class="tsd-anchor"></a><h3>polaroid</h3><div class="tsd-signature tsd-kind-icon">polaroid<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L170">src/config.ts:170</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>image polaroid camera effect</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="return" class="tsd-anchor"></a><h3>return</h3><div class="tsd-signature tsd-kind-icon">return<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L144">src/config.ts:144</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>return processed canvas imagedata in result</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="saturation" class="tsd-anchor"></a><h3>saturation</h3><div class="tsd-signature tsd-kind-icon">saturation<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L156">src/config.ts:156</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>range: -1 (reduce saturation) to 1 (increase saturation)</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="sepia" class="tsd-anchor"></a><h3>sepia</h3><div class="tsd-signature tsd-kind-icon">sepia<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L162">src/config.ts:162</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>image sepia colors</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="sharpness" class="tsd-anchor"></a><h3>sharpness</h3><div class="tsd-signature tsd-kind-icon">sharpness<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L152">src/config.ts:152</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>range: 0 (no sharpening) to 1 (maximum sharpening)</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="technicolor" class="tsd-anchor"></a><h3>technicolor</h3><div class="tsd-signature tsd-kind-icon">technicolor<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L168">src/config.ts:168</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>image technicolor colors</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="vintage" class="tsd-anchor"></a><h3>vintage</h3><div class="tsd-signature tsd-kind-icon">vintage<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L164">src/config.ts:164</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>image vintage colors</p>
</dd></dl></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="width" class="tsd-anchor"></a><h3>width</h3><div class="tsd-signature tsd-kind-icon">width<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L135">src/config.ts:135</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>resize input width</p>
<ul>
<li>if both width and height are set to 0, there is no resizing</li>

View File

@ -1,4 +1,4 @@
<!DOCTYPE html><html class="default no-js"><head><meta charSet="utf-8"/><meta http-equiv="x-ua-compatible" content="IE=edge"/><title>GestureConfig | @vladmandic/human - v2.5.0</title><meta name="description" content="Documentation for @vladmandic/human - v2.5.0"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="stylesheet" href="../assets/style.css"/><link rel="stylesheet" href="../assets/highlight.css"/><script async src="../assets/search.js" id="search-script"></script></head><body><script>document.body.classList.add(localStorage.getItem("tsd-theme") || "os")</script><header><div class="tsd-page-toolbar"><div class="container"><div class="table-wrap"><div class="table-cell" id="tsd-search" data-base=".."><div class="field"><label for="tsd-search-field" class="tsd-widget search no-caption">Search</label><input type="text" id="tsd-search-field"/></div><ul class="results"><li class="state loading">Preparing search index...</li><li class="state failure">The search index is not available</li></ul><a href="../index.html" class="title">@vladmandic/human - v2.5.0</a></div><div class="table-cell" id="tsd-widgets"><div id="tsd-filter"><a href="#" class="tsd-widget options no-caption" data-toggle="options">Options</a><div class="tsd-filter-group"><div class="tsd-select" id="tsd-filter-visibility"><span class="tsd-select-label">All</span><ul class="tsd-select-list"><li data-value="public">Public</li><li data-value="protected">Public/Protected</li><li data-value="private" class="selected">All</li></ul></div> <input type="checkbox" id="tsd-filter-inherited" checked/><label class="tsd-widget" for="tsd-filter-inherited">Inherited</label></div></div><a href="#" class="tsd-widget menu no-caption" data-toggle="menu">Menu</a></div></div></div></div><div class="tsd-page-title"><div class="container"><ul class="tsd-breadcrumb"><li><a href="../index.html">@vladmandic/human - v2.5.0</a></li><li><a 
href="GestureConfig.html">GestureConfig</a></li></ul><h1>Interface GestureConfig</h1></div></div></header><div class="container container-main"><div class="row"><div class="col-8 col-content"><section class="tsd-panel tsd-comment"><div class="tsd-comment tsd-typography"><div class="lead">
<p>Controlls gesture detection</p>
</div></div></section><section class="tsd-panel tsd-hierarchy"><h3>Hierarchy</h3><ul class="tsd-hierarchy"><li><span class="target">GestureConfig</span></li></ul></section><section class="tsd-panel-group tsd-index-group"><h2>Index</h2><section class="tsd-panel tsd-index-panel"><div class="tsd-index-content"><section class="tsd-index-section "><h3>Properties</h3><ul class="tsd-index-list"><li class="tsd-kind-property tsd-parent-kind-interface"><a href="GestureConfig.html#enabled" class="tsd-kind-icon">enabled</a></li></ul></section></div></section></section><section class="tsd-panel-group tsd-member-group "><h2>Properties</h2><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="enabled" class="tsd-anchor"></a><h3>enabled</h3><div class="tsd-signature tsd-kind-icon">enabled<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L176">src/config.ts:176</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>is gesture detection enabled?</p>
</div></div></section><section class="tsd-panel tsd-hierarchy"><h3>Hierarchy</h3><ul class="tsd-hierarchy"><li><span class="target">GestureConfig</span></li></ul></section><section class="tsd-panel-group tsd-index-group"><h2>Index</h2><section class="tsd-panel tsd-index-panel"><div class="tsd-index-content"><section class="tsd-index-section "><h3>Properties</h3><ul class="tsd-index-list"><li class="tsd-kind-property tsd-parent-kind-interface"><a href="GestureConfig.html#enabled" class="tsd-kind-icon">enabled</a></li></ul></section></div></section></section><section class="tsd-panel-group tsd-member-group "><h2>Properties</h2><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="enabled" class="tsd-anchor"></a><h3>enabled</h3><div class="tsd-signature tsd-kind-icon">enabled<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L178">src/config.ts:178</a></li></ul></aside><div class="tsd-comment tsd-typography"><dl class="tsd-comment-tags"><dt>property</dt><dd><p>is gesture detection enabled?</p>
</dd></dl></div></section></section></div><div class="col-4 col-menu menu-sticky-wrap menu-highlight"><nav class="tsd-navigation primary"><ul><li class=""><a href="../index.html">Exports</a></li><li class=" tsd-kind-namespace"><a href="../modules/Tensor.html">Tensor</a></li></ul></nav><nav class="tsd-navigation secondary menu-sticky"><ul><li class="current tsd-kind-interface"><a href="GestureConfig.html" class="tsd-kind-icon">Gesture<wbr/>Config</a><ul><li class="tsd-kind-property tsd-parent-kind-interface"><a href="GestureConfig.html#enabled" class="tsd-kind-icon">enabled</a></li></ul></li></ul></nav></div></div></div><footer class=""><div class="container"><h2>Legend</h2><div class="tsd-legend-group"><ul class="tsd-legend"><li class="tsd-kind-constructor tsd-parent-kind-class"><span class="tsd-kind-icon">Constructor</span></li><li class="tsd-kind-property tsd-parent-kind-class"><span class="tsd-kind-icon">Property</span></li><li class="tsd-kind-method tsd-parent-kind-class"><span class="tsd-kind-icon">Method</span></li></ul><ul class="tsd-legend"><li class="tsd-kind-property tsd-parent-kind-interface"><span class="tsd-kind-icon">Property</span></li></ul></div><h2>Settings</h2><p>Theme <select id="theme"><option value="os">OS</option><option value="light">Light</option><option value="dark">Dark</option></select></p></div></footer><div class="overlay"></div><script src="../assets/main.js"></script></body></html>

View File

@ -110,7 +110,9 @@ export interface SegmentationConfig extends GenericConfig {
export interface FilterConfig {
/** @property are image filters enabled? */
enabled: boolean;
/** @property perform image histogram equalization */
/** @property perform image histogram equalization
* - equalization is performed on input as a whole and detected face before its passed for further analysis
*/
equalization: boolean;
/** resize input width
* - if both width and height are set to 0, there is no resizing
@ -207,6 +209,8 @@ export interface Config {
* default: 0.7
*/
cacheSensitivity: number;
/** Perform immediate garbage collection on deallocated tensors instead of caching them */
deallocate: boolean;
/** Internal Variable */
skipAllowed: boolean;
/** {@link FilterConfig} */

View File

@ -132,10 +132,10 @@ export declare class Human {
* @param {boolean} input.getTensor should image processing also return tensor or just canvas
* @returns { tensor, canvas }
*/
image(input: Input, getTensor?: boolean): {
image(input: Input, getTensor?: boolean): Promise<{
tensor: Tensor<import("@tensorflow/tfjs-core").Rank> | null;
canvas: import("./exports").AnyCanvas | null;
};
}>;
/** Segmentation method takes any input and returns processed canvas with body segmentation
* - Segmentation is not triggered as part of detect process
*

View File

@ -2,4 +2,4 @@
* Image enhancements
*/
import type { Tensor } from '../exports';
export declare function histogramEqualization(input: Tensor): Tensor;
export declare function histogramEqualization(inputImage: Tensor): Promise<Tensor>;

View File

@ -4,8 +4,8 @@
import type { Input, AnyCanvas, Tensor, Config } from '../exports';
export declare function canvas(width: any, height: any): AnyCanvas;
export declare function copy(input: AnyCanvas, output?: AnyCanvas): AnyCanvas;
export declare function process(input: Input, config: Config, getTensor?: boolean): {
export declare function process(input: Input, config: Config, getTensor?: boolean): Promise<{
tensor: Tensor | null;
canvas: AnyCanvas | null;
};
}>;
export declare function skip(config: any, input: Tensor): Promise<boolean>;

2
wiki

@ -1 +1 @@
Subproject commit 0deb501cf47e1783e8ca4426b7bf4697196f09e2
Subproject commit e5a6342e4e2dd5d79b73cafada222ef4b1d1621a