mirror of https://github.com/vladmandic/human
new human.compare api
parent
fc93c39201
commit
8a504215a6
|
@ -9,8 +9,12 @@
|
|||
|
||||
## Changelog
|
||||
|
||||
### **HEAD -> main** 2021/11/05 mandic00@live.com
|
||||
### **HEAD -> main** 2021/11/07 mandic00@live.com
|
||||
|
||||
|
||||
### **origin/main** 2021/11/06 mandic00@live.com
|
||||
|
||||
- new frame change detection algorithm
|
||||
- add histogram equalization
|
||||
- implement wasm missing ops
|
||||
- performance and memory optimizations
|
||||
|
|
25
TODO.md
25
TODO.md
|
@ -42,18 +42,27 @@ MoveNet MultiPose model does not work with WASM backend due to missing F32 broad
|
|||
### Pending release
|
||||
|
||||
New:
|
||||
- New frame change detection algorithm used for cache determination
|
||||
- New frame change detection algorithm used for [cache determination](https://vladmandic.github.io/human/typedoc/interfaces/Config.html#cacheSensitivity)
|
||||
based on temporal input difference
|
||||
- New optional input histogram equalization
|
||||
auto-level input for optimal brightness/contrast via `config.filter.equalization`
|
||||
- New built-in Tensorflow profiling [human.profile](https://vladmandic.github.io/human/typedoc/classes/Human.html#profile)
|
||||
- New optional input histogram equalization [config.filter.equalization](https://vladmandic.github.io/human/)
|
||||
auto-level input for optimal brightness/contrast
|
||||
- New event-baseed interface [human.events](https://vladmandic.github.io/human/typedoc/classes/Human.html#events)
|
||||
- New configuration validation [human.validate](https://vladmandic.github.io/human/typedoc/classes/Human.html#validate)
|
||||
- New input compare function [human.compare](https://vladmandic.github.io/human/typedoc/classes/Human.html#compare)
|
||||
this function is internally used by `human` to determine frame changes and cache validation
|
||||
- New [custom built TFJS](https://github.com/vladmandic/tfjs) for bundled version
|
||||
result is a pure module with reduced bundle size and include built-in support for all backends
|
||||
note: **nobundle** and **node** versions link to standard `@tensorflow` packages
|
||||
|
||||
Changed:
|
||||
- Supports all modules on all backends
|
||||
- [Default configuration values](https://github.com/vladmandic/human/blob/main/src/config.ts#L262) have been tuned for precision and performance
|
||||
- Supports all built-in modules on all backends
|
||||
via custom implementation of missing kernel ops
|
||||
- Performance and precision improvements
|
||||
**face**, **hand** and **gestures** modules
|
||||
- Use custom built TFJS for bundled version
|
||||
reduced bundle size and built-in support for all backends
|
||||
`nobundle` and `node` versions link to standard `@tensorflow` packages
|
||||
- **face**, **hand**
|
||||
- **gestures** modules
|
||||
- **face matching**
|
||||
- Fix **ReactJS** compatibility
|
||||
- Better precision using **WASM**
|
||||
Previous issues due to math low-precision in WASM implementation
|
||||
|
|
|
@ -910,8 +910,8 @@ function GLImageFilter() {
|
|||
this.get = function() {
|
||||
return filterChain;
|
||||
};
|
||||
this.apply = function(image24) {
|
||||
resize(image24.width, image24.height);
|
||||
this.apply = function(image25) {
|
||||
resize(image25.width, image25.height);
|
||||
drawCount = 0;
|
||||
if (!sourceTexture)
|
||||
sourceTexture = gl.createTexture();
|
||||
|
@ -920,7 +920,7 @@ function GLImageFilter() {
|
|||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
|
||||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
|
||||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
|
||||
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image24);
|
||||
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image25);
|
||||
for (let i = 0; i < filterChain.length; i++) {
|
||||
lastInChain = i === filterChain.length - 1;
|
||||
const f = filterChain[i];
|
||||
|
@ -928,9 +928,9 @@ function GLImageFilter() {
|
|||
}
|
||||
return fxcanvas;
|
||||
};
|
||||
this.draw = function(image24) {
|
||||
this.draw = function(image25) {
|
||||
this.add("brightness", 0);
|
||||
return this.apply(image24);
|
||||
return this.apply(image25);
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -1170,6 +1170,28 @@ async function skip(config3, input) {
|
|||
}
|
||||
return skipFrame;
|
||||
}
|
||||
async function compare(config3, input1, input2) {
|
||||
const t = {};
|
||||
if (!input1 || !input2 || input1.shape.length !== 4 || input1.shape.length !== input2.shape.length) {
|
||||
if (!config3.debug)
|
||||
log("invalid input tensor or tensor shapes do not match:", input1.shape, input2.shape);
|
||||
return 0;
|
||||
}
|
||||
if (input1.shape[0] !== 1 || input2.shape[0] !== 1 || input1.shape[3] !== 3 || input2.shape[3] !== 3) {
|
||||
if (!config3.debug)
|
||||
log("input tensors must be of shape [1, height, width, 3]:", input1.shape, input2.shape);
|
||||
return 0;
|
||||
}
|
||||
t.input1 = tfjs_esm_exports.clone(input1);
|
||||
t.input2 = input1.shape[1] !== input2.shape[1] || input1.shape[2] !== input2.shape[2] ? tfjs_esm_exports.image.resizeBilinear(input2, [input1.shape[1], input1.shape[2]]) : tfjs_esm_exports.clone(input2);
|
||||
t.diff = tfjs_esm_exports.sub(t.input1, t.input2);
|
||||
t.squared = tfjs_esm_exports.mul(t.diff, t.diff);
|
||||
t.sum = tfjs_esm_exports.sum(t.squared);
|
||||
const diffSum = await t.sum.data();
|
||||
const diffRelative = diffSum[0] / (input1.shape[1] || 1) / (input1.shape[2] || 1) / 255 / 3;
|
||||
tfjs_esm_exports.dispose([t.input1, t.input2, t.diff, t.squared, t.sum]);
|
||||
return diffRelative;
|
||||
}
|
||||
|
||||
// src/util/env.ts
|
||||
var Env = class {
|
||||
|
@ -1321,7 +1343,7 @@ async function load2(config3) {
|
|||
log("cached model:", model2["modelUrl"]);
|
||||
return model2;
|
||||
}
|
||||
async function predict(image24, config3, idx, count2) {
|
||||
async function predict(image25, config3, idx, count2) {
|
||||
var _a, _b;
|
||||
if (!model2)
|
||||
return null;
|
||||
|
@ -1333,7 +1355,7 @@ async function predict(image24, config3, idx, count2) {
|
|||
}
|
||||
skipped2 = 0;
|
||||
return new Promise(async (resolve) => {
|
||||
const resize = tfjs_esm_exports.image.resizeBilinear(image24, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
|
||||
const resize = tfjs_esm_exports.image.resizeBilinear(image25, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
|
||||
const res = model2 == null ? void 0 : model2.execute(resize);
|
||||
const num = (await res.data())[0];
|
||||
cached[idx] = Math.round(100 * num) / 100;
|
||||
|
@ -4647,10 +4669,10 @@ var scaleBoxCoordinates = (box4, factor) => {
|
|||
const endPoint = [box4.endPoint[0] * factor[0], box4.endPoint[1] * factor[1]];
|
||||
return { startPoint, endPoint, landmarks: box4.landmarks, confidence: box4.confidence };
|
||||
};
|
||||
var cutBoxFromImageAndResize = (box4, image24, cropSize) => {
|
||||
const h = image24.shape[1];
|
||||
const w = image24.shape[2];
|
||||
const crop2 = tfjs_esm_exports.image.cropAndResize(image24, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
|
||||
var cutBoxFromImageAndResize = (box4, image25, cropSize) => {
|
||||
const h = image25.shape[1];
|
||||
const w = image25.shape[2];
|
||||
const crop2 = tfjs_esm_exports.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
|
||||
const norm = tfjs_esm_exports.div(crop2, 255);
|
||||
tfjs_esm_exports.dispose(crop2);
|
||||
return norm;
|
||||
|
@ -5298,7 +5320,7 @@ function max2d(inputs, minScore) {
|
|||
return [0, 0, newScore];
|
||||
});
|
||||
}
|
||||
async function predict4(image24, config3) {
|
||||
async function predict4(image25, config3) {
|
||||
const skipTime = (config3.body.skipTime || 0) > now() - lastTime4;
|
||||
const skipFrame = skipped5 < (config3.body.skipFrames || 0);
|
||||
if (config3.skipAllowed && skipTime && skipFrame && Object.keys(cache2.keypoints).length > 0) {
|
||||
|
@ -5311,7 +5333,7 @@ async function predict4(image24, config3) {
|
|||
const tensor3 = tfjs_esm_exports.tidy(() => {
|
||||
if (!(model5 == null ? void 0 : model5.inputs[0].shape))
|
||||
return null;
|
||||
const resize = tfjs_esm_exports.image.resizeBilinear(image24, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
|
||||
const resize = tfjs_esm_exports.image.resizeBilinear(image25, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
|
||||
const enhance3 = tfjs_esm_exports.mul(resize, 2);
|
||||
const norm = enhance3.sub(1);
|
||||
return norm;
|
||||
|
@ -5338,8 +5360,8 @@ async function predict4(image24, config3) {
|
|||
y2 / model5.inputs[0].shape[1]
|
||||
],
|
||||
position: [
|
||||
Math.round(image24.shape[2] * x2 / model5.inputs[0].shape[2]),
|
||||
Math.round(image24.shape[1] * y2 / model5.inputs[0].shape[1])
|
||||
Math.round(image25.shape[2] * x2 / model5.inputs[0].shape[2]),
|
||||
Math.round(image25.shape[1] * y2 / model5.inputs[0].shape[1])
|
||||
]
|
||||
});
|
||||
}
|
||||
|
@ -5399,7 +5421,7 @@ async function load6(config3) {
|
|||
log("cached model:", model6["modelUrl"]);
|
||||
return model6;
|
||||
}
|
||||
async function predict5(image24, config3, idx, count2) {
|
||||
async function predict5(image25, config3, idx, count2) {
|
||||
var _a, _b;
|
||||
if (!model6)
|
||||
return null;
|
||||
|
@ -5416,7 +5438,7 @@ async function predict5(image24, config3, idx, count2) {
|
|||
if ((_a2 = config3.face.emotion) == null ? void 0 : _a2.enabled) {
|
||||
const t = {};
|
||||
const inputSize8 = (model6 == null ? void 0 : model6.inputs[0].shape) ? model6.inputs[0].shape[2] : 0;
|
||||
t.resize = tfjs_esm_exports.image.resizeBilinear(image24, [inputSize8, inputSize8], false);
|
||||
t.resize = tfjs_esm_exports.image.resizeBilinear(image25, [inputSize8, inputSize8], false);
|
||||
[t.red, t.green, t.blue] = tfjs_esm_exports.split(t.resize, 3, 3);
|
||||
t.redNorm = tfjs_esm_exports.mul(t.red, rgb[0]);
|
||||
t.greenNorm = tfjs_esm_exports.mul(t.green, rgb[1]);
|
||||
|
@ -5723,7 +5745,7 @@ function enhance2(input) {
|
|||
tfjs_esm_exports.dispose(crop2);
|
||||
return norm;
|
||||
}
|
||||
async function predict7(image24, config3, idx, count2) {
|
||||
async function predict7(image25, config3, idx, count2) {
|
||||
var _a, _b, _c, _d;
|
||||
if (!model9)
|
||||
return null;
|
||||
|
@ -5743,7 +5765,7 @@ async function predict7(image24, config3, idx, count2) {
|
|||
descriptor: []
|
||||
};
|
||||
if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) {
|
||||
const enhanced = enhance2(image24);
|
||||
const enhanced = enhance2(image25);
|
||||
const resT = model9 == null ? void 0 : model9.execute(enhanced);
|
||||
lastTime7 = now();
|
||||
tfjs_esm_exports.dispose(enhanced);
|
||||
|
@ -5784,16 +5806,16 @@ function getBoxCenter2(box4) {
|
|||
box4.startPoint[1] + (box4.endPoint[1] - box4.startPoint[1]) / 2
|
||||
];
|
||||
}
|
||||
function cutBoxFromImageAndResize2(box4, image24, cropSize) {
|
||||
const h = image24.shape[1];
|
||||
const w = image24.shape[2];
|
||||
function cutBoxFromImageAndResize2(box4, image25, cropSize) {
|
||||
const h = image25.shape[1];
|
||||
const w = image25.shape[2];
|
||||
const boxes = [[
|
||||
box4.startPoint[1] / h,
|
||||
box4.startPoint[0] / w,
|
||||
box4.endPoint[1] / h,
|
||||
box4.endPoint[0] / w
|
||||
]];
|
||||
return tfjs_esm_exports.image.cropAndResize(image24, boxes, [0], cropSize);
|
||||
return tfjs_esm_exports.image.cropAndResize(image25, boxes, [0], cropSize);
|
||||
}
|
||||
function scaleBoxCoordinates2(box4, factor) {
|
||||
const startPoint = [box4.startPoint[0] * factor[0], box4.startPoint[1] * factor[1]];
|
||||
|
@ -8975,13 +8997,13 @@ var HandPipeline = class {
|
|||
Math.trunc(coord[2])
|
||||
]);
|
||||
}
|
||||
async estimateHands(image24, config3) {
|
||||
async estimateHands(image25, config3) {
|
||||
let useFreshBox = false;
|
||||
let boxes;
|
||||
const skipTime = (config3.hand.skipTime || 0) > now() - lastTime8;
|
||||
const skipFrame = this.skipped < (config3.hand.skipFrames || 0);
|
||||
if (config3.skipAllowed && skipTime && skipFrame) {
|
||||
boxes = await this.handDetector.predict(image24, config3);
|
||||
boxes = await this.handDetector.predict(image25, config3);
|
||||
this.skipped = 0;
|
||||
}
|
||||
if (config3.skipAllowed)
|
||||
|
@ -9000,8 +9022,8 @@ var HandPipeline = class {
|
|||
if (config3.hand.landmarks) {
|
||||
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
|
||||
const palmCenter = getBoxCenter2(currentBox);
|
||||
const palmCenterNormalized = [palmCenter[0] / image24.shape[2], palmCenter[1] / image24.shape[1]];
|
||||
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tfjs_esm_exports.image.rotateWithOffset(image24, angle, 0, palmCenterNormalized) : image24.clone();
|
||||
const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]];
|
||||
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tfjs_esm_exports.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone();
|
||||
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
|
||||
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
|
||||
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
|
||||
|
@ -9842,11 +9864,11 @@ function bodyParts(body4) {
|
|||
}
|
||||
}
|
||||
}
|
||||
for (const [pair, compare] of relative) {
|
||||
for (const [pair, compare2] of relative) {
|
||||
const left = body4.keypoints.findIndex((kp) => kp && kp.part === pair[0]);
|
||||
const right = body4.keypoints.findIndex((kp) => kp && kp.part === pair[1]);
|
||||
const leftTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare[0]);
|
||||
const rightTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare[1]);
|
||||
const leftTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare2[0]);
|
||||
const rightTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare2[1]);
|
||||
if (!body4.keypoints[leftTo] || !body4.keypoints[rightTo])
|
||||
continue;
|
||||
const distanceLeft = body4.keypoints[left] ? [
|
||||
|
@ -9939,7 +9961,7 @@ async function load11(config3) {
|
|||
inputSize7 = 256;
|
||||
return model10;
|
||||
}
|
||||
async function parseSinglePose(res, config3, image24, inputBox) {
|
||||
async function parseSinglePose(res, config3, image25, inputBox) {
|
||||
const kpt4 = res[0][0];
|
||||
const keypoints = [];
|
||||
let score = 0;
|
||||
|
@ -9955,15 +9977,15 @@ async function parseSinglePose(res, config3, image24, inputBox) {
|
|||
part: kpt3[id],
|
||||
positionRaw,
|
||||
position: [
|
||||
Math.round((image24.shape[2] || 0) * positionRaw[0]),
|
||||
Math.round((image24.shape[1] || 0) * positionRaw[1])
|
||||
Math.round((image25.shape[2] || 0) * positionRaw[0]),
|
||||
Math.round((image25.shape[1] || 0) * positionRaw[1])
|
||||
]
|
||||
});
|
||||
}
|
||||
}
|
||||
score = keypoints.reduce((prev, curr) => curr.score > prev ? curr.score : prev, 0);
|
||||
const bodies = [];
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
|
||||
const annotations2 = {};
|
||||
for (const [name, indexes] of Object.entries(connected3)) {
|
||||
const pt = [];
|
||||
|
@ -9980,7 +10002,7 @@ async function parseSinglePose(res, config3, image24, inputBox) {
|
|||
bodies.push(body4);
|
||||
return bodies;
|
||||
}
|
||||
async function parseMultiPose(res, config3, image24, inputBox) {
|
||||
async function parseMultiPose(res, config3, image25, inputBox) {
|
||||
const bodies = [];
|
||||
for (let id = 0; id < res[0].length; id++) {
|
||||
const kpt4 = res[0][id];
|
||||
|
@ -9998,11 +10020,11 @@ async function parseMultiPose(res, config3, image24, inputBox) {
|
|||
part: kpt3[i],
|
||||
score: Math.round(100 * score) / 100,
|
||||
positionRaw,
|
||||
position: [Math.round((image24.shape[2] || 0) * positionRaw[0]), Math.round((image24.shape[1] || 0) * positionRaw[1])]
|
||||
position: [Math.round((image25.shape[2] || 0) * positionRaw[0]), Math.round((image25.shape[1] || 0) * positionRaw[1])]
|
||||
});
|
||||
}
|
||||
}
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
|
||||
const annotations2 = {};
|
||||
for (const [name, indexes] of Object.entries(connected3)) {
|
||||
const pt = [];
|
||||
|
@ -10134,7 +10156,7 @@ async function process4(res, inputSize8, outputShape, config3) {
|
|||
results = results.filter((_val, idx) => nmsIdx.includes(idx)).sort((a, b) => b.score - a.score);
|
||||
return results;
|
||||
}
|
||||
async function predict11(image24, config3) {
|
||||
async function predict11(image25, config3) {
|
||||
const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
|
||||
const skipFrame = skipped11 < (config3.object.skipFrames || 0);
|
||||
if (config3.skipAllowed && skipTime && skipFrame && last5.length > 0) {
|
||||
|
@ -10145,8 +10167,8 @@ async function predict11(image24, config3) {
|
|||
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
|
||||
return last5;
|
||||
return new Promise(async (resolve) => {
|
||||
const outputSize2 = [image24.shape[2], image24.shape[1]];
|
||||
const resize = tfjs_esm_exports.image.resizeBilinear(image24, [model11.inputSize, model11.inputSize], false);
|
||||
const outputSize2 = [image25.shape[2], image25.shape[1]];
|
||||
const resize = tfjs_esm_exports.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false);
|
||||
const norm = tfjs_esm_exports.div(resize, 255);
|
||||
const transpose = norm.transpose([0, 3, 1, 2]);
|
||||
tfjs_esm_exports.dispose(norm);
|
||||
|
@ -12862,6 +12884,9 @@ var Human = class {
|
|||
enhance(input) {
|
||||
return enhance2(input);
|
||||
}
|
||||
compare(firstImageTensor, secondImageTensor) {
|
||||
return compare(this.config, firstImageTensor, secondImageTensor);
|
||||
}
|
||||
async init() {
|
||||
await check(this, true);
|
||||
await this.tf.ready();
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@ -1280,7 +1280,7 @@ var require_long = __commonJS({
|
|||
};
|
||||
LongPrototype.gte = LongPrototype.greaterThanOrEqual;
|
||||
LongPrototype.ge = LongPrototype.greaterThanOrEqual;
|
||||
LongPrototype.compare = function compare(other) {
|
||||
LongPrototype.compare = function compare2(other) {
|
||||
if (!isLong(other))
|
||||
other = fromValue(other);
|
||||
if (this.eq(other))
|
||||
|
@ -71495,6 +71495,28 @@ async function skip(config3, input2) {
|
|||
}
|
||||
return skipFrame;
|
||||
}
|
||||
async function compare(config3, input1, input2) {
|
||||
const t = {};
|
||||
if (!input1 || !input2 || input1.shape.length !== 4 || input1.shape.length !== input2.shape.length) {
|
||||
if (!config3.debug)
|
||||
log("invalid input tensor or tensor shapes do not match:", input1.shape, input2.shape);
|
||||
return 0;
|
||||
}
|
||||
if (input1.shape[0] !== 1 || input2.shape[0] !== 1 || input1.shape[3] !== 3 || input2.shape[3] !== 3) {
|
||||
if (!config3.debug)
|
||||
log("input tensors must be of shape [1, height, width, 3]:", input1.shape, input2.shape);
|
||||
return 0;
|
||||
}
|
||||
t.input1 = clone(input1);
|
||||
t.input2 = input1.shape[1] !== input2.shape[1] || input1.shape[2] !== input2.shape[2] ? image.resizeBilinear(input2, [input1.shape[1], input1.shape[2]]) : clone(input2);
|
||||
t.diff = sub(t.input1, t.input2);
|
||||
t.squared = mul(t.diff, t.diff);
|
||||
t.sum = sum2(t.squared);
|
||||
const diffSum = await t.sum.data();
|
||||
const diffRelative = diffSum[0] / (input1.shape[1] || 1) / (input1.shape[2] || 1) / 255 / 3;
|
||||
dispose([t.input1, t.input2, t.diff, t.squared, t.sum]);
|
||||
return diffRelative;
|
||||
}
|
||||
|
||||
// src/util/env.ts
|
||||
var Env = class {
|
||||
|
@ -80167,11 +80189,11 @@ function bodyParts(body4) {
|
|||
}
|
||||
}
|
||||
}
|
||||
for (const [pair, compare] of relative) {
|
||||
for (const [pair, compare2] of relative) {
|
||||
const left = body4.keypoints.findIndex((kp) => kp && kp.part === pair[0]);
|
||||
const right = body4.keypoints.findIndex((kp) => kp && kp.part === pair[1]);
|
||||
const leftTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare[0]);
|
||||
const rightTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare[1]);
|
||||
const leftTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare2[0]);
|
||||
const rightTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare2[1]);
|
||||
if (!body4.keypoints[leftTo] || !body4.keypoints[rightTo])
|
||||
continue;
|
||||
const distanceLeft = body4.keypoints[left] ? [
|
||||
|
@ -83187,6 +83209,9 @@ var Human = class {
|
|||
enhance(input2) {
|
||||
return enhance2(input2);
|
||||
}
|
||||
compare(firstImageTensor, secondImageTensor) {
|
||||
return compare(this.config, firstImageTensor, secondImageTensor);
|
||||
}
|
||||
async init() {
|
||||
await check(this, true);
|
||||
await this.tf.ready();
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@ -1290,7 +1290,7 @@ var Human = (() => {
|
|||
};
|
||||
LongPrototype.gte = LongPrototype.greaterThanOrEqual;
|
||||
LongPrototype.ge = LongPrototype.greaterThanOrEqual;
|
||||
LongPrototype.compare = function compare(other) {
|
||||
LongPrototype.compare = function compare2(other) {
|
||||
if (!isLong(other))
|
||||
other = fromValue(other);
|
||||
if (this.eq(other))
|
||||
|
@ -71505,6 +71505,28 @@ return a / b;`;
|
|||
}
|
||||
return skipFrame;
|
||||
}
|
||||
async function compare(config3, input1, input2) {
|
||||
const t = {};
|
||||
if (!input1 || !input2 || input1.shape.length !== 4 || input1.shape.length !== input2.shape.length) {
|
||||
if (!config3.debug)
|
||||
log("invalid input tensor or tensor shapes do not match:", input1.shape, input2.shape);
|
||||
return 0;
|
||||
}
|
||||
if (input1.shape[0] !== 1 || input2.shape[0] !== 1 || input1.shape[3] !== 3 || input2.shape[3] !== 3) {
|
||||
if (!config3.debug)
|
||||
log("input tensors must be of shape [1, height, width, 3]:", input1.shape, input2.shape);
|
||||
return 0;
|
||||
}
|
||||
t.input1 = clone(input1);
|
||||
t.input2 = input1.shape[1] !== input2.shape[1] || input1.shape[2] !== input2.shape[2] ? image.resizeBilinear(input2, [input1.shape[1], input1.shape[2]]) : clone(input2);
|
||||
t.diff = sub(t.input1, t.input2);
|
||||
t.squared = mul(t.diff, t.diff);
|
||||
t.sum = sum2(t.squared);
|
||||
const diffSum = await t.sum.data();
|
||||
const diffRelative = diffSum[0] / (input1.shape[1] || 1) / (input1.shape[2] || 1) / 255 / 3;
|
||||
dispose([t.input1, t.input2, t.diff, t.squared, t.sum]);
|
||||
return diffRelative;
|
||||
}
|
||||
|
||||
// src/util/env.ts
|
||||
var Env = class {
|
||||
|
@ -80177,11 +80199,11 @@ return a / b;`;
|
|||
}
|
||||
}
|
||||
}
|
||||
for (const [pair, compare] of relative) {
|
||||
for (const [pair, compare2] of relative) {
|
||||
const left = body4.keypoints.findIndex((kp) => kp && kp.part === pair[0]);
|
||||
const right = body4.keypoints.findIndex((kp) => kp && kp.part === pair[1]);
|
||||
const leftTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare[0]);
|
||||
const rightTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare[1]);
|
||||
const leftTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare2[0]);
|
||||
const rightTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare2[1]);
|
||||
if (!body4.keypoints[leftTo] || !body4.keypoints[rightTo])
|
||||
continue;
|
||||
const distanceLeft = body4.keypoints[left] ? [
|
||||
|
@ -83197,6 +83219,9 @@ lBhEMohlFerLlBjEMohMVTEARDKCITsAk2AEgAAAkAAAAAAAAAAAAAAAAAAAAAAAASAAAAAAAAD/
|
|||
enhance(input2) {
|
||||
return enhance2(input2);
|
||||
}
|
||||
compare(firstImageTensor, secondImageTensor) {
|
||||
return compare(this.config, firstImageTensor, secondImageTensor);
|
||||
}
|
||||
async init() {
|
||||
await check(this, true);
|
||||
await this.tf.ready();
|
||||
|
|
|
@ -919,8 +919,8 @@ function GLImageFilter() {
|
|||
this.get = function() {
|
||||
return filterChain;
|
||||
};
|
||||
this.apply = function(image24) {
|
||||
resize(image24.width, image24.height);
|
||||
this.apply = function(image25) {
|
||||
resize(image25.width, image25.height);
|
||||
drawCount = 0;
|
||||
if (!sourceTexture)
|
||||
sourceTexture = gl.createTexture();
|
||||
|
@ -929,7 +929,7 @@ function GLImageFilter() {
|
|||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
|
||||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
|
||||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
|
||||
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image24);
|
||||
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image25);
|
||||
for (let i = 0; i < filterChain.length; i++) {
|
||||
lastInChain = i === filterChain.length - 1;
|
||||
const f = filterChain[i];
|
||||
|
@ -937,9 +937,9 @@ function GLImageFilter() {
|
|||
}
|
||||
return fxcanvas;
|
||||
};
|
||||
this.draw = function(image24) {
|
||||
this.draw = function(image25) {
|
||||
this.add("brightness", 0);
|
||||
return this.apply(image24);
|
||||
return this.apply(image25);
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -1180,6 +1180,28 @@ async function skip(config3, input) {
|
|||
}
|
||||
return skipFrame;
|
||||
}
|
||||
async function compare(config3, input1, input2) {
|
||||
const t = {};
|
||||
if (!input1 || !input2 || input1.shape.length !== 4 || input1.shape.length !== input2.shape.length) {
|
||||
if (!config3.debug)
|
||||
log("invalid input tensor or tensor shapes do not match:", input1.shape, input2.shape);
|
||||
return 0;
|
||||
}
|
||||
if (input1.shape[0] !== 1 || input2.shape[0] !== 1 || input1.shape[3] !== 3 || input2.shape[3] !== 3) {
|
||||
if (!config3.debug)
|
||||
log("input tensors must be of shape [1, height, width, 3]:", input1.shape, input2.shape);
|
||||
return 0;
|
||||
}
|
||||
t.input1 = tf2.clone(input1);
|
||||
t.input2 = input1.shape[1] !== input2.shape[1] || input1.shape[2] !== input2.shape[2] ? tf2.image.resizeBilinear(input2, [input1.shape[1], input1.shape[2]]) : tf2.clone(input2);
|
||||
t.diff = tf2.sub(t.input1, t.input2);
|
||||
t.squared = tf2.mul(t.diff, t.diff);
|
||||
t.sum = tf2.sum(t.squared);
|
||||
const diffSum = await t.sum.data();
|
||||
const diffRelative = diffSum[0] / (input1.shape[1] || 1) / (input1.shape[2] || 1) / 255 / 3;
|
||||
tf2.dispose([t.input1, t.input2, t.diff, t.squared, t.sum]);
|
||||
return diffRelative;
|
||||
}
|
||||
|
||||
// src/util/env.ts
|
||||
var Env = class {
|
||||
|
@ -1339,7 +1361,7 @@ async function load2(config3) {
|
|||
log("cached model:", model2["modelUrl"]);
|
||||
return model2;
|
||||
}
|
||||
async function predict(image24, config3, idx, count2) {
|
||||
async function predict(image25, config3, idx, count2) {
|
||||
var _a, _b;
|
||||
if (!model2)
|
||||
return null;
|
||||
|
@ -1351,7 +1373,7 @@ async function predict(image24, config3, idx, count2) {
|
|||
}
|
||||
skipped2 = 0;
|
||||
return new Promise(async (resolve) => {
|
||||
const resize = tf5.image.resizeBilinear(image24, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
|
||||
const resize = tf5.image.resizeBilinear(image25, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
|
||||
const res = model2 == null ? void 0 : model2.execute(resize);
|
||||
const num = (await res.data())[0];
|
||||
cached[idx] = Math.round(100 * num) / 100;
|
||||
|
@ -4671,10 +4693,10 @@ var scaleBoxCoordinates = (box4, factor) => {
|
|||
const endPoint = [box4.endPoint[0] * factor[0], box4.endPoint[1] * factor[1]];
|
||||
return { startPoint, endPoint, landmarks: box4.landmarks, confidence: box4.confidence };
|
||||
};
|
||||
var cutBoxFromImageAndResize = (box4, image24, cropSize) => {
|
||||
const h = image24.shape[1];
|
||||
const w = image24.shape[2];
|
||||
const crop2 = tf6.image.cropAndResize(image24, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
|
||||
var cutBoxFromImageAndResize = (box4, image25, cropSize) => {
|
||||
const h = image25.shape[1];
|
||||
const w = image25.shape[2];
|
||||
const crop2 = tf6.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
|
||||
const norm = tf6.div(crop2, 255);
|
||||
tf6.dispose(crop2);
|
||||
return norm;
|
||||
|
@ -5331,7 +5353,7 @@ function max2d(inputs, minScore) {
|
|||
return [0, 0, newScore];
|
||||
});
|
||||
}
|
||||
async function predict4(image24, config3) {
|
||||
async function predict4(image25, config3) {
|
||||
const skipTime = (config3.body.skipTime || 0) > now() - lastTime4;
|
||||
const skipFrame = skipped5 < (config3.body.skipFrames || 0);
|
||||
if (config3.skipAllowed && skipTime && skipFrame && Object.keys(cache2.keypoints).length > 0) {
|
||||
|
@ -5344,7 +5366,7 @@ async function predict4(image24, config3) {
|
|||
const tensor3 = tf10.tidy(() => {
|
||||
if (!(model5 == null ? void 0 : model5.inputs[0].shape))
|
||||
return null;
|
||||
const resize = tf10.image.resizeBilinear(image24, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
|
||||
const resize = tf10.image.resizeBilinear(image25, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
|
||||
const enhance3 = tf10.mul(resize, 2);
|
||||
const norm = enhance3.sub(1);
|
||||
return norm;
|
||||
|
@ -5371,8 +5393,8 @@ async function predict4(image24, config3) {
|
|||
y2 / model5.inputs[0].shape[1]
|
||||
],
|
||||
position: [
|
||||
Math.round(image24.shape[2] * x2 / model5.inputs[0].shape[2]),
|
||||
Math.round(image24.shape[1] * y2 / model5.inputs[0].shape[1])
|
||||
Math.round(image25.shape[2] * x2 / model5.inputs[0].shape[2]),
|
||||
Math.round(image25.shape[1] * y2 / model5.inputs[0].shape[1])
|
||||
]
|
||||
});
|
||||
}
|
||||
|
@ -5433,7 +5455,7 @@ async function load6(config3) {
|
|||
log("cached model:", model6["modelUrl"]);
|
||||
return model6;
|
||||
}
|
||||
async function predict5(image24, config3, idx, count2) {
|
||||
async function predict5(image25, config3, idx, count2) {
|
||||
var _a, _b;
|
||||
if (!model6)
|
||||
return null;
|
||||
|
@ -5450,7 +5472,7 @@ async function predict5(image24, config3, idx, count2) {
|
|||
if ((_a2 = config3.face.emotion) == null ? void 0 : _a2.enabled) {
|
||||
const t = {};
|
||||
const inputSize8 = (model6 == null ? void 0 : model6.inputs[0].shape) ? model6.inputs[0].shape[2] : 0;
|
||||
t.resize = tf11.image.resizeBilinear(image24, [inputSize8, inputSize8], false);
|
||||
t.resize = tf11.image.resizeBilinear(image25, [inputSize8, inputSize8], false);
|
||||
[t.red, t.green, t.blue] = tf11.split(t.resize, 3, 3);
|
||||
t.redNorm = tf11.mul(t.red, rgb[0]);
|
||||
t.greenNorm = tf11.mul(t.green, rgb[1]);
|
||||
|
@ -5762,7 +5784,7 @@ function enhance2(input) {
|
|||
tf14.dispose(crop2);
|
||||
return norm;
|
||||
}
|
||||
async function predict7(image24, config3, idx, count2) {
|
||||
async function predict7(image25, config3, idx, count2) {
|
||||
var _a, _b, _c, _d;
|
||||
if (!model9)
|
||||
return null;
|
||||
|
@ -5782,7 +5804,7 @@ async function predict7(image24, config3, idx, count2) {
|
|||
descriptor: []
|
||||
};
|
||||
if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) {
|
||||
const enhanced = enhance2(image24);
|
||||
const enhanced = enhance2(image25);
|
||||
const resT = model9 == null ? void 0 : model9.execute(enhanced);
|
||||
lastTime7 = now();
|
||||
tf14.dispose(enhanced);
|
||||
|
@ -5830,16 +5852,16 @@ function getBoxCenter2(box4) {
|
|||
box4.startPoint[1] + (box4.endPoint[1] - box4.startPoint[1]) / 2
|
||||
];
|
||||
}
|
||||
function cutBoxFromImageAndResize2(box4, image24, cropSize) {
|
||||
const h = image24.shape[1];
|
||||
const w = image24.shape[2];
|
||||
function cutBoxFromImageAndResize2(box4, image25, cropSize) {
|
||||
const h = image25.shape[1];
|
||||
const w = image25.shape[2];
|
||||
const boxes = [[
|
||||
box4.startPoint[1] / h,
|
||||
box4.startPoint[0] / w,
|
||||
box4.endPoint[1] / h,
|
||||
box4.endPoint[0] / w
|
||||
]];
|
||||
return tf15.image.cropAndResize(image24, boxes, [0], cropSize);
|
||||
return tf15.image.cropAndResize(image25, boxes, [0], cropSize);
|
||||
}
|
||||
function scaleBoxCoordinates2(box4, factor) {
|
||||
const startPoint = [box4.startPoint[0] * factor[0], box4.startPoint[1] * factor[1]];
|
||||
|
@ -9022,13 +9044,13 @@ var HandPipeline = class {
|
|||
Math.trunc(coord[2])
|
||||
]);
|
||||
}
|
||||
async estimateHands(image24, config3) {
|
||||
async estimateHands(image25, config3) {
|
||||
let useFreshBox = false;
|
||||
let boxes;
|
||||
const skipTime = (config3.hand.skipTime || 0) > now() - lastTime8;
|
||||
const skipFrame = this.skipped < (config3.hand.skipFrames || 0);
|
||||
if (config3.skipAllowed && skipTime && skipFrame) {
|
||||
boxes = await this.handDetector.predict(image24, config3);
|
||||
boxes = await this.handDetector.predict(image25, config3);
|
||||
this.skipped = 0;
|
||||
}
|
||||
if (config3.skipAllowed)
|
||||
|
@ -9047,8 +9069,8 @@ var HandPipeline = class {
|
|||
if (config3.hand.landmarks) {
|
||||
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
|
||||
const palmCenter = getBoxCenter2(currentBox);
|
||||
const palmCenterNormalized = [palmCenter[0] / image24.shape[2], palmCenter[1] / image24.shape[1]];
|
||||
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf17.image.rotateWithOffset(image24, angle, 0, palmCenterNormalized) : image24.clone();
|
||||
const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]];
|
||||
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf17.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone();
|
||||
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
|
||||
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
|
||||
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
|
||||
|
@ -9894,11 +9916,11 @@ function bodyParts(body4) {
|
|||
}
|
||||
}
|
||||
}
|
||||
for (const [pair, compare] of relative) {
|
||||
for (const [pair, compare2] of relative) {
|
||||
const left = body4.keypoints.findIndex((kp) => kp && kp.part === pair[0]);
|
||||
const right = body4.keypoints.findIndex((kp) => kp && kp.part === pair[1]);
|
||||
const leftTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare[0]);
|
||||
const rightTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare[1]);
|
||||
const leftTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare2[0]);
|
||||
const rightTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare2[1]);
|
||||
if (!body4.keypoints[leftTo] || !body4.keypoints[rightTo])
|
||||
continue;
|
||||
const distanceLeft = body4.keypoints[left] ? [
|
||||
|
@ -9991,7 +10013,7 @@ async function load11(config3) {
|
|||
inputSize7 = 256;
|
||||
return model10;
|
||||
}
|
||||
async function parseSinglePose(res, config3, image24, inputBox) {
|
||||
async function parseSinglePose(res, config3, image25, inputBox) {
|
||||
const kpt4 = res[0][0];
|
||||
const keypoints = [];
|
||||
let score = 0;
|
||||
|
@ -10007,15 +10029,15 @@ async function parseSinglePose(res, config3, image24, inputBox) {
|
|||
part: kpt3[id],
|
||||
positionRaw,
|
||||
position: [
|
||||
Math.round((image24.shape[2] || 0) * positionRaw[0]),
|
||||
Math.round((image24.shape[1] || 0) * positionRaw[1])
|
||||
Math.round((image25.shape[2] || 0) * positionRaw[0]),
|
||||
Math.round((image25.shape[1] || 0) * positionRaw[1])
|
||||
]
|
||||
});
|
||||
}
|
||||
}
|
||||
score = keypoints.reduce((prev, curr) => curr.score > prev ? curr.score : prev, 0);
|
||||
const bodies = [];
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
|
||||
const annotations2 = {};
|
||||
for (const [name, indexes] of Object.entries(connected3)) {
|
||||
const pt = [];
|
||||
|
@ -10032,7 +10054,7 @@ async function parseSinglePose(res, config3, image24, inputBox) {
|
|||
bodies.push(body4);
|
||||
return bodies;
|
||||
}
|
||||
async function parseMultiPose(res, config3, image24, inputBox) {
|
||||
async function parseMultiPose(res, config3, image25, inputBox) {
|
||||
const bodies = [];
|
||||
for (let id = 0; id < res[0].length; id++) {
|
||||
const kpt4 = res[0][id];
|
||||
|
@ -10050,11 +10072,11 @@ async function parseMultiPose(res, config3, image24, inputBox) {
|
|||
part: kpt3[i],
|
||||
score: Math.round(100 * score) / 100,
|
||||
positionRaw,
|
||||
position: [Math.round((image24.shape[2] || 0) * positionRaw[0]), Math.round((image24.shape[1] || 0) * positionRaw[1])]
|
||||
position: [Math.round((image25.shape[2] || 0) * positionRaw[0]), Math.round((image25.shape[1] || 0) * positionRaw[1])]
|
||||
});
|
||||
}
|
||||
}
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
|
||||
const annotations2 = {};
|
||||
for (const [name, indexes] of Object.entries(connected3)) {
|
||||
const pt = [];
|
||||
|
@ -10187,7 +10209,7 @@ async function process4(res, inputSize8, outputShape, config3) {
|
|||
results = results.filter((_val, idx) => nmsIdx.includes(idx)).sort((a, b) => b.score - a.score);
|
||||
return results;
|
||||
}
|
||||
async function predict11(image24, config3) {
|
||||
async function predict11(image25, config3) {
|
||||
const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
|
||||
const skipFrame = skipped11 < (config3.object.skipFrames || 0);
|
||||
if (config3.skipAllowed && skipTime && skipFrame && last5.length > 0) {
|
||||
|
@ -10198,8 +10220,8 @@ async function predict11(image24, config3) {
|
|||
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
|
||||
return last5;
|
||||
return new Promise(async (resolve) => {
|
||||
const outputSize2 = [image24.shape[2], image24.shape[1]];
|
||||
const resize = tf22.image.resizeBilinear(image24, [model11.inputSize, model11.inputSize], false);
|
||||
const outputSize2 = [image25.shape[2], image25.shape[1]];
|
||||
const resize = tf22.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false);
|
||||
const norm = tf22.div(resize, 255);
|
||||
const transpose = norm.transpose([0, 3, 1, 2]);
|
||||
tf22.dispose(norm);
|
||||
|
@ -12924,6 +12946,9 @@ var Human = class {
|
|||
enhance(input) {
|
||||
return enhance2(input);
|
||||
}
|
||||
compare(firstImageTensor, secondImageTensor) {
|
||||
return compare(this.config, firstImageTensor, secondImageTensor);
|
||||
}
|
||||
async init() {
|
||||
await check(this, true);
|
||||
await this.tf.ready();
|
||||
|
|
|
@ -920,8 +920,8 @@ function GLImageFilter() {
|
|||
this.get = function() {
|
||||
return filterChain;
|
||||
};
|
||||
this.apply = function(image24) {
|
||||
resize(image24.width, image24.height);
|
||||
this.apply = function(image25) {
|
||||
resize(image25.width, image25.height);
|
||||
drawCount = 0;
|
||||
if (!sourceTexture)
|
||||
sourceTexture = gl.createTexture();
|
||||
|
@ -930,7 +930,7 @@ function GLImageFilter() {
|
|||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
|
||||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
|
||||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
|
||||
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image24);
|
||||
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image25);
|
||||
for (let i = 0; i < filterChain.length; i++) {
|
||||
lastInChain = i === filterChain.length - 1;
|
||||
const f = filterChain[i];
|
||||
|
@ -938,9 +938,9 @@ function GLImageFilter() {
|
|||
}
|
||||
return fxcanvas;
|
||||
};
|
||||
this.draw = function(image24) {
|
||||
this.draw = function(image25) {
|
||||
this.add("brightness", 0);
|
||||
return this.apply(image24);
|
||||
return this.apply(image25);
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -1181,6 +1181,28 @@ async function skip(config3, input) {
|
|||
}
|
||||
return skipFrame;
|
||||
}
|
||||
async function compare(config3, input1, input2) {
|
||||
const t = {};
|
||||
if (!input1 || !input2 || input1.shape.length !== 4 || input1.shape.length !== input2.shape.length) {
|
||||
if (!config3.debug)
|
||||
log("invalid input tensor or tensor shapes do not match:", input1.shape, input2.shape);
|
||||
return 0;
|
||||
}
|
||||
if (input1.shape[0] !== 1 || input2.shape[0] !== 1 || input1.shape[3] !== 3 || input2.shape[3] !== 3) {
|
||||
if (!config3.debug)
|
||||
log("input tensors must be of shape [1, height, width, 3]:", input1.shape, input2.shape);
|
||||
return 0;
|
||||
}
|
||||
t.input1 = tf2.clone(input1);
|
||||
t.input2 = input1.shape[1] !== input2.shape[1] || input1.shape[2] !== input2.shape[2] ? tf2.image.resizeBilinear(input2, [input1.shape[1], input1.shape[2]]) : tf2.clone(input2);
|
||||
t.diff = tf2.sub(t.input1, t.input2);
|
||||
t.squared = tf2.mul(t.diff, t.diff);
|
||||
t.sum = tf2.sum(t.squared);
|
||||
const diffSum = await t.sum.data();
|
||||
const diffRelative = diffSum[0] / (input1.shape[1] || 1) / (input1.shape[2] || 1) / 255 / 3;
|
||||
tf2.dispose([t.input1, t.input2, t.diff, t.squared, t.sum]);
|
||||
return diffRelative;
|
||||
}
|
||||
|
||||
// src/util/env.ts
|
||||
var Env = class {
|
||||
|
@ -1340,7 +1362,7 @@ async function load2(config3) {
|
|||
log("cached model:", model2["modelUrl"]);
|
||||
return model2;
|
||||
}
|
||||
async function predict(image24, config3, idx, count2) {
|
||||
async function predict(image25, config3, idx, count2) {
|
||||
var _a, _b;
|
||||
if (!model2)
|
||||
return null;
|
||||
|
@ -1352,7 +1374,7 @@ async function predict(image24, config3, idx, count2) {
|
|||
}
|
||||
skipped2 = 0;
|
||||
return new Promise(async (resolve) => {
|
||||
const resize = tf5.image.resizeBilinear(image24, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
|
||||
const resize = tf5.image.resizeBilinear(image25, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
|
||||
const res = model2 == null ? void 0 : model2.execute(resize);
|
||||
const num = (await res.data())[0];
|
||||
cached[idx] = Math.round(100 * num) / 100;
|
||||
|
@ -4672,10 +4694,10 @@ var scaleBoxCoordinates = (box4, factor) => {
|
|||
const endPoint = [box4.endPoint[0] * factor[0], box4.endPoint[1] * factor[1]];
|
||||
return { startPoint, endPoint, landmarks: box4.landmarks, confidence: box4.confidence };
|
||||
};
|
||||
var cutBoxFromImageAndResize = (box4, image24, cropSize) => {
|
||||
const h = image24.shape[1];
|
||||
const w = image24.shape[2];
|
||||
const crop2 = tf6.image.cropAndResize(image24, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
|
||||
var cutBoxFromImageAndResize = (box4, image25, cropSize) => {
|
||||
const h = image25.shape[1];
|
||||
const w = image25.shape[2];
|
||||
const crop2 = tf6.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
|
||||
const norm = tf6.div(crop2, 255);
|
||||
tf6.dispose(crop2);
|
||||
return norm;
|
||||
|
@ -5332,7 +5354,7 @@ function max2d(inputs, minScore) {
|
|||
return [0, 0, newScore];
|
||||
});
|
||||
}
|
||||
async function predict4(image24, config3) {
|
||||
async function predict4(image25, config3) {
|
||||
const skipTime = (config3.body.skipTime || 0) > now() - lastTime4;
|
||||
const skipFrame = skipped5 < (config3.body.skipFrames || 0);
|
||||
if (config3.skipAllowed && skipTime && skipFrame && Object.keys(cache2.keypoints).length > 0) {
|
||||
|
@ -5345,7 +5367,7 @@ async function predict4(image24, config3) {
|
|||
const tensor3 = tf10.tidy(() => {
|
||||
if (!(model5 == null ? void 0 : model5.inputs[0].shape))
|
||||
return null;
|
||||
const resize = tf10.image.resizeBilinear(image24, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
|
||||
const resize = tf10.image.resizeBilinear(image25, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
|
||||
const enhance3 = tf10.mul(resize, 2);
|
||||
const norm = enhance3.sub(1);
|
||||
return norm;
|
||||
|
@ -5372,8 +5394,8 @@ async function predict4(image24, config3) {
|
|||
y2 / model5.inputs[0].shape[1]
|
||||
],
|
||||
position: [
|
||||
Math.round(image24.shape[2] * x2 / model5.inputs[0].shape[2]),
|
||||
Math.round(image24.shape[1] * y2 / model5.inputs[0].shape[1])
|
||||
Math.round(image25.shape[2] * x2 / model5.inputs[0].shape[2]),
|
||||
Math.round(image25.shape[1] * y2 / model5.inputs[0].shape[1])
|
||||
]
|
||||
});
|
||||
}
|
||||
|
@ -5434,7 +5456,7 @@ async function load6(config3) {
|
|||
log("cached model:", model6["modelUrl"]);
|
||||
return model6;
|
||||
}
|
||||
async function predict5(image24, config3, idx, count2) {
|
||||
async function predict5(image25, config3, idx, count2) {
|
||||
var _a, _b;
|
||||
if (!model6)
|
||||
return null;
|
||||
|
@ -5451,7 +5473,7 @@ async function predict5(image24, config3, idx, count2) {
|
|||
if ((_a2 = config3.face.emotion) == null ? void 0 : _a2.enabled) {
|
||||
const t = {};
|
||||
const inputSize8 = (model6 == null ? void 0 : model6.inputs[0].shape) ? model6.inputs[0].shape[2] : 0;
|
||||
t.resize = tf11.image.resizeBilinear(image24, [inputSize8, inputSize8], false);
|
||||
t.resize = tf11.image.resizeBilinear(image25, [inputSize8, inputSize8], false);
|
||||
[t.red, t.green, t.blue] = tf11.split(t.resize, 3, 3);
|
||||
t.redNorm = tf11.mul(t.red, rgb[0]);
|
||||
t.greenNorm = tf11.mul(t.green, rgb[1]);
|
||||
|
@ -5763,7 +5785,7 @@ function enhance2(input) {
|
|||
tf14.dispose(crop2);
|
||||
return norm;
|
||||
}
|
||||
async function predict7(image24, config3, idx, count2) {
|
||||
async function predict7(image25, config3, idx, count2) {
|
||||
var _a, _b, _c, _d;
|
||||
if (!model9)
|
||||
return null;
|
||||
|
@ -5783,7 +5805,7 @@ async function predict7(image24, config3, idx, count2) {
|
|||
descriptor: []
|
||||
};
|
||||
if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) {
|
||||
const enhanced = enhance2(image24);
|
||||
const enhanced = enhance2(image25);
|
||||
const resT = model9 == null ? void 0 : model9.execute(enhanced);
|
||||
lastTime7 = now();
|
||||
tf14.dispose(enhanced);
|
||||
|
@ -5831,16 +5853,16 @@ function getBoxCenter2(box4) {
|
|||
box4.startPoint[1] + (box4.endPoint[1] - box4.startPoint[1]) / 2
|
||||
];
|
||||
}
|
||||
function cutBoxFromImageAndResize2(box4, image24, cropSize) {
|
||||
const h = image24.shape[1];
|
||||
const w = image24.shape[2];
|
||||
function cutBoxFromImageAndResize2(box4, image25, cropSize) {
|
||||
const h = image25.shape[1];
|
||||
const w = image25.shape[2];
|
||||
const boxes = [[
|
||||
box4.startPoint[1] / h,
|
||||
box4.startPoint[0] / w,
|
||||
box4.endPoint[1] / h,
|
||||
box4.endPoint[0] / w
|
||||
]];
|
||||
return tf15.image.cropAndResize(image24, boxes, [0], cropSize);
|
||||
return tf15.image.cropAndResize(image25, boxes, [0], cropSize);
|
||||
}
|
||||
function scaleBoxCoordinates2(box4, factor) {
|
||||
const startPoint = [box4.startPoint[0] * factor[0], box4.startPoint[1] * factor[1]];
|
||||
|
@ -9023,13 +9045,13 @@ var HandPipeline = class {
|
|||
Math.trunc(coord[2])
|
||||
]);
|
||||
}
|
||||
async estimateHands(image24, config3) {
|
||||
async estimateHands(image25, config3) {
|
||||
let useFreshBox = false;
|
||||
let boxes;
|
||||
const skipTime = (config3.hand.skipTime || 0) > now() - lastTime8;
|
||||
const skipFrame = this.skipped < (config3.hand.skipFrames || 0);
|
||||
if (config3.skipAllowed && skipTime && skipFrame) {
|
||||
boxes = await this.handDetector.predict(image24, config3);
|
||||
boxes = await this.handDetector.predict(image25, config3);
|
||||
this.skipped = 0;
|
||||
}
|
||||
if (config3.skipAllowed)
|
||||
|
@ -9048,8 +9070,8 @@ var HandPipeline = class {
|
|||
if (config3.hand.landmarks) {
|
||||
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
|
||||
const palmCenter = getBoxCenter2(currentBox);
|
||||
const palmCenterNormalized = [palmCenter[0] / image24.shape[2], palmCenter[1] / image24.shape[1]];
|
||||
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf17.image.rotateWithOffset(image24, angle, 0, palmCenterNormalized) : image24.clone();
|
||||
const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]];
|
||||
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf17.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone();
|
||||
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
|
||||
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
|
||||
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
|
||||
|
@ -9895,11 +9917,11 @@ function bodyParts(body4) {
|
|||
}
|
||||
}
|
||||
}
|
||||
for (const [pair, compare] of relative) {
|
||||
for (const [pair, compare2] of relative) {
|
||||
const left = body4.keypoints.findIndex((kp) => kp && kp.part === pair[0]);
|
||||
const right = body4.keypoints.findIndex((kp) => kp && kp.part === pair[1]);
|
||||
const leftTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare[0]);
|
||||
const rightTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare[1]);
|
||||
const leftTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare2[0]);
|
||||
const rightTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare2[1]);
|
||||
if (!body4.keypoints[leftTo] || !body4.keypoints[rightTo])
|
||||
continue;
|
||||
const distanceLeft = body4.keypoints[left] ? [
|
||||
|
@ -9992,7 +10014,7 @@ async function load11(config3) {
|
|||
inputSize7 = 256;
|
||||
return model10;
|
||||
}
|
||||
async function parseSinglePose(res, config3, image24, inputBox) {
|
||||
async function parseSinglePose(res, config3, image25, inputBox) {
|
||||
const kpt4 = res[0][0];
|
||||
const keypoints = [];
|
||||
let score = 0;
|
||||
|
@ -10008,15 +10030,15 @@ async function parseSinglePose(res, config3, image24, inputBox) {
|
|||
part: kpt3[id],
|
||||
positionRaw,
|
||||
position: [
|
||||
Math.round((image24.shape[2] || 0) * positionRaw[0]),
|
||||
Math.round((image24.shape[1] || 0) * positionRaw[1])
|
||||
Math.round((image25.shape[2] || 0) * positionRaw[0]),
|
||||
Math.round((image25.shape[1] || 0) * positionRaw[1])
|
||||
]
|
||||
});
|
||||
}
|
||||
}
|
||||
score = keypoints.reduce((prev, curr) => curr.score > prev ? curr.score : prev, 0);
|
||||
const bodies = [];
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
|
||||
const annotations2 = {};
|
||||
for (const [name, indexes] of Object.entries(connected3)) {
|
||||
const pt = [];
|
||||
|
@ -10033,7 +10055,7 @@ async function parseSinglePose(res, config3, image24, inputBox) {
|
|||
bodies.push(body4);
|
||||
return bodies;
|
||||
}
|
||||
async function parseMultiPose(res, config3, image24, inputBox) {
|
||||
async function parseMultiPose(res, config3, image25, inputBox) {
|
||||
const bodies = [];
|
||||
for (let id = 0; id < res[0].length; id++) {
|
||||
const kpt4 = res[0][id];
|
||||
|
@ -10051,11 +10073,11 @@ async function parseMultiPose(res, config3, image24, inputBox) {
|
|||
part: kpt3[i],
|
||||
score: Math.round(100 * score) / 100,
|
||||
positionRaw,
|
||||
position: [Math.round((image24.shape[2] || 0) * positionRaw[0]), Math.round((image24.shape[1] || 0) * positionRaw[1])]
|
||||
position: [Math.round((image25.shape[2] || 0) * positionRaw[0]), Math.round((image25.shape[1] || 0) * positionRaw[1])]
|
||||
});
|
||||
}
|
||||
}
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
|
||||
const annotations2 = {};
|
||||
for (const [name, indexes] of Object.entries(connected3)) {
|
||||
const pt = [];
|
||||
|
@ -10188,7 +10210,7 @@ async function process4(res, inputSize8, outputShape, config3) {
|
|||
results = results.filter((_val, idx) => nmsIdx.includes(idx)).sort((a, b) => b.score - a.score);
|
||||
return results;
|
||||
}
|
||||
async function predict11(image24, config3) {
|
||||
async function predict11(image25, config3) {
|
||||
const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
|
||||
const skipFrame = skipped11 < (config3.object.skipFrames || 0);
|
||||
if (config3.skipAllowed && skipTime && skipFrame && last5.length > 0) {
|
||||
|
@ -10199,8 +10221,8 @@ async function predict11(image24, config3) {
|
|||
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
|
||||
return last5;
|
||||
return new Promise(async (resolve) => {
|
||||
const outputSize2 = [image24.shape[2], image24.shape[1]];
|
||||
const resize = tf22.image.resizeBilinear(image24, [model11.inputSize, model11.inputSize], false);
|
||||
const outputSize2 = [image25.shape[2], image25.shape[1]];
|
||||
const resize = tf22.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false);
|
||||
const norm = tf22.div(resize, 255);
|
||||
const transpose = norm.transpose([0, 3, 1, 2]);
|
||||
tf22.dispose(norm);
|
||||
|
@ -12925,6 +12947,9 @@ var Human = class {
|
|||
enhance(input) {
|
||||
return enhance2(input);
|
||||
}
|
||||
compare(firstImageTensor, secondImageTensor) {
|
||||
return compare(this.config, firstImageTensor, secondImageTensor);
|
||||
}
|
||||
async init() {
|
||||
await check(this, true);
|
||||
await this.tf.ready();
|
||||
|
|
|
@ -919,8 +919,8 @@ function GLImageFilter() {
|
|||
this.get = function() {
|
||||
return filterChain;
|
||||
};
|
||||
this.apply = function(image24) {
|
||||
resize(image24.width, image24.height);
|
||||
this.apply = function(image25) {
|
||||
resize(image25.width, image25.height);
|
||||
drawCount = 0;
|
||||
if (!sourceTexture)
|
||||
sourceTexture = gl.createTexture();
|
||||
|
@ -929,7 +929,7 @@ function GLImageFilter() {
|
|||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
|
||||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
|
||||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
|
||||
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image24);
|
||||
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image25);
|
||||
for (let i = 0; i < filterChain.length; i++) {
|
||||
lastInChain = i === filterChain.length - 1;
|
||||
const f = filterChain[i];
|
||||
|
@ -937,9 +937,9 @@ function GLImageFilter() {
|
|||
}
|
||||
return fxcanvas;
|
||||
};
|
||||
this.draw = function(image24) {
|
||||
this.draw = function(image25) {
|
||||
this.add("brightness", 0);
|
||||
return this.apply(image24);
|
||||
return this.apply(image25);
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -1180,6 +1180,28 @@ async function skip(config3, input) {
|
|||
}
|
||||
return skipFrame;
|
||||
}
|
||||
async function compare(config3, input1, input2) {
|
||||
const t = {};
|
||||
if (!input1 || !input2 || input1.shape.length !== 4 || input1.shape.length !== input2.shape.length) {
|
||||
if (!config3.debug)
|
||||
log("invalid input tensor or tensor shapes do not match:", input1.shape, input2.shape);
|
||||
return 0;
|
||||
}
|
||||
if (input1.shape[0] !== 1 || input2.shape[0] !== 1 || input1.shape[3] !== 3 || input2.shape[3] !== 3) {
|
||||
if (!config3.debug)
|
||||
log("input tensors must be of shape [1, height, width, 3]:", input1.shape, input2.shape);
|
||||
return 0;
|
||||
}
|
||||
t.input1 = tf2.clone(input1);
|
||||
t.input2 = input1.shape[1] !== input2.shape[1] || input1.shape[2] !== input2.shape[2] ? tf2.image.resizeBilinear(input2, [input1.shape[1], input1.shape[2]]) : tf2.clone(input2);
|
||||
t.diff = tf2.sub(t.input1, t.input2);
|
||||
t.squared = tf2.mul(t.diff, t.diff);
|
||||
t.sum = tf2.sum(t.squared);
|
||||
const diffSum = await t.sum.data();
|
||||
const diffRelative = diffSum[0] / (input1.shape[1] || 1) / (input1.shape[2] || 1) / 255 / 3;
|
||||
tf2.dispose([t.input1, t.input2, t.diff, t.squared, t.sum]);
|
||||
return diffRelative;
|
||||
}
|
||||
|
||||
// src/util/env.ts
|
||||
var Env = class {
|
||||
|
@ -1339,7 +1361,7 @@ async function load2(config3) {
|
|||
log("cached model:", model2["modelUrl"]);
|
||||
return model2;
|
||||
}
|
||||
async function predict(image24, config3, idx, count2) {
|
||||
async function predict(image25, config3, idx, count2) {
|
||||
var _a, _b;
|
||||
if (!model2)
|
||||
return null;
|
||||
|
@ -1351,7 +1373,7 @@ async function predict(image24, config3, idx, count2) {
|
|||
}
|
||||
skipped2 = 0;
|
||||
return new Promise(async (resolve) => {
|
||||
const resize = tf5.image.resizeBilinear(image24, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
|
||||
const resize = tf5.image.resizeBilinear(image25, [(model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[2] : 0, (model2 == null ? void 0 : model2.inputs[0].shape) ? model2.inputs[0].shape[1] : 0], false);
|
||||
const res = model2 == null ? void 0 : model2.execute(resize);
|
||||
const num = (await res.data())[0];
|
||||
cached[idx] = Math.round(100 * num) / 100;
|
||||
|
@ -4671,10 +4693,10 @@ var scaleBoxCoordinates = (box4, factor) => {
|
|||
const endPoint = [box4.endPoint[0] * factor[0], box4.endPoint[1] * factor[1]];
|
||||
return { startPoint, endPoint, landmarks: box4.landmarks, confidence: box4.confidence };
|
||||
};
|
||||
var cutBoxFromImageAndResize = (box4, image24, cropSize) => {
|
||||
const h = image24.shape[1];
|
||||
const w = image24.shape[2];
|
||||
const crop2 = tf6.image.cropAndResize(image24, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
|
||||
var cutBoxFromImageAndResize = (box4, image25, cropSize) => {
|
||||
const h = image25.shape[1];
|
||||
const w = image25.shape[2];
|
||||
const crop2 = tf6.image.cropAndResize(image25, [[box4.startPoint[1] / h, box4.startPoint[0] / w, box4.endPoint[1] / h, box4.endPoint[0] / w]], [0], cropSize);
|
||||
const norm = tf6.div(crop2, 255);
|
||||
tf6.dispose(crop2);
|
||||
return norm;
|
||||
|
@ -5331,7 +5353,7 @@ function max2d(inputs, minScore) {
|
|||
return [0, 0, newScore];
|
||||
});
|
||||
}
|
||||
async function predict4(image24, config3) {
|
||||
async function predict4(image25, config3) {
|
||||
const skipTime = (config3.body.skipTime || 0) > now() - lastTime4;
|
||||
const skipFrame = skipped5 < (config3.body.skipFrames || 0);
|
||||
if (config3.skipAllowed && skipTime && skipFrame && Object.keys(cache2.keypoints).length > 0) {
|
||||
|
@ -5344,7 +5366,7 @@ async function predict4(image24, config3) {
|
|||
const tensor3 = tf10.tidy(() => {
|
||||
if (!(model5 == null ? void 0 : model5.inputs[0].shape))
|
||||
return null;
|
||||
const resize = tf10.image.resizeBilinear(image24, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
|
||||
const resize = tf10.image.resizeBilinear(image25, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
|
||||
const enhance3 = tf10.mul(resize, 2);
|
||||
const norm = enhance3.sub(1);
|
||||
return norm;
|
||||
|
@ -5371,8 +5393,8 @@ async function predict4(image24, config3) {
|
|||
y2 / model5.inputs[0].shape[1]
|
||||
],
|
||||
position: [
|
||||
Math.round(image24.shape[2] * x2 / model5.inputs[0].shape[2]),
|
||||
Math.round(image24.shape[1] * y2 / model5.inputs[0].shape[1])
|
||||
Math.round(image25.shape[2] * x2 / model5.inputs[0].shape[2]),
|
||||
Math.round(image25.shape[1] * y2 / model5.inputs[0].shape[1])
|
||||
]
|
||||
});
|
||||
}
|
||||
|
@ -5433,7 +5455,7 @@ async function load6(config3) {
|
|||
log("cached model:", model6["modelUrl"]);
|
||||
return model6;
|
||||
}
|
||||
async function predict5(image24, config3, idx, count2) {
|
||||
async function predict5(image25, config3, idx, count2) {
|
||||
var _a, _b;
|
||||
if (!model6)
|
||||
return null;
|
||||
|
@ -5450,7 +5472,7 @@ async function predict5(image24, config3, idx, count2) {
|
|||
if ((_a2 = config3.face.emotion) == null ? void 0 : _a2.enabled) {
|
||||
const t = {};
|
||||
const inputSize8 = (model6 == null ? void 0 : model6.inputs[0].shape) ? model6.inputs[0].shape[2] : 0;
|
||||
t.resize = tf11.image.resizeBilinear(image24, [inputSize8, inputSize8], false);
|
||||
t.resize = tf11.image.resizeBilinear(image25, [inputSize8, inputSize8], false);
|
||||
[t.red, t.green, t.blue] = tf11.split(t.resize, 3, 3);
|
||||
t.redNorm = tf11.mul(t.red, rgb[0]);
|
||||
t.greenNorm = tf11.mul(t.green, rgb[1]);
|
||||
|
@ -5762,7 +5784,7 @@ function enhance2(input) {
|
|||
tf14.dispose(crop2);
|
||||
return norm;
|
||||
}
|
||||
async function predict7(image24, config3, idx, count2) {
|
||||
async function predict7(image25, config3, idx, count2) {
|
||||
var _a, _b, _c, _d;
|
||||
if (!model9)
|
||||
return null;
|
||||
|
@ -5782,7 +5804,7 @@ async function predict7(image24, config3, idx, count2) {
|
|||
descriptor: []
|
||||
};
|
||||
if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) {
|
||||
const enhanced = enhance2(image24);
|
||||
const enhanced = enhance2(image25);
|
||||
const resT = model9 == null ? void 0 : model9.execute(enhanced);
|
||||
lastTime7 = now();
|
||||
tf14.dispose(enhanced);
|
||||
|
@ -5830,16 +5852,16 @@ function getBoxCenter2(box4) {
|
|||
box4.startPoint[1] + (box4.endPoint[1] - box4.startPoint[1]) / 2
|
||||
];
|
||||
}
|
||||
function cutBoxFromImageAndResize2(box4, image24, cropSize) {
|
||||
const h = image24.shape[1];
|
||||
const w = image24.shape[2];
|
||||
function cutBoxFromImageAndResize2(box4, image25, cropSize) {
|
||||
const h = image25.shape[1];
|
||||
const w = image25.shape[2];
|
||||
const boxes = [[
|
||||
box4.startPoint[1] / h,
|
||||
box4.startPoint[0] / w,
|
||||
box4.endPoint[1] / h,
|
||||
box4.endPoint[0] / w
|
||||
]];
|
||||
return tf15.image.cropAndResize(image24, boxes, [0], cropSize);
|
||||
return tf15.image.cropAndResize(image25, boxes, [0], cropSize);
|
||||
}
|
||||
function scaleBoxCoordinates2(box4, factor) {
|
||||
const startPoint = [box4.startPoint[0] * factor[0], box4.startPoint[1] * factor[1]];
|
||||
|
@ -9022,13 +9044,13 @@ var HandPipeline = class {
|
|||
Math.trunc(coord[2])
|
||||
]);
|
||||
}
|
||||
async estimateHands(image24, config3) {
|
||||
async estimateHands(image25, config3) {
|
||||
let useFreshBox = false;
|
||||
let boxes;
|
||||
const skipTime = (config3.hand.skipTime || 0) > now() - lastTime8;
|
||||
const skipFrame = this.skipped < (config3.hand.skipFrames || 0);
|
||||
if (config3.skipAllowed && skipTime && skipFrame) {
|
||||
boxes = await this.handDetector.predict(image24, config3);
|
||||
boxes = await this.handDetector.predict(image25, config3);
|
||||
this.skipped = 0;
|
||||
}
|
||||
if (config3.skipAllowed)
|
||||
|
@ -9047,8 +9069,8 @@ var HandPipeline = class {
|
|||
if (config3.hand.landmarks) {
|
||||
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
|
||||
const palmCenter = getBoxCenter2(currentBox);
|
||||
const palmCenterNormalized = [palmCenter[0] / image24.shape[2], palmCenter[1] / image24.shape[1]];
|
||||
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf17.image.rotateWithOffset(image24, angle, 0, palmCenterNormalized) : image24.clone();
|
||||
const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]];
|
||||
const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf17.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone();
|
||||
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
|
||||
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
|
||||
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
|
||||
|
@ -9894,11 +9916,11 @@ function bodyParts(body4) {
|
|||
}
|
||||
}
|
||||
}
|
||||
for (const [pair, compare] of relative) {
|
||||
for (const [pair, compare2] of relative) {
|
||||
const left = body4.keypoints.findIndex((kp) => kp && kp.part === pair[0]);
|
||||
const right = body4.keypoints.findIndex((kp) => kp && kp.part === pair[1]);
|
||||
const leftTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare[0]);
|
||||
const rightTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare[1]);
|
||||
const leftTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare2[0]);
|
||||
const rightTo = body4.keypoints.findIndex((kp) => kp && kp.part === compare2[1]);
|
||||
if (!body4.keypoints[leftTo] || !body4.keypoints[rightTo])
|
||||
continue;
|
||||
const distanceLeft = body4.keypoints[left] ? [
|
||||
|
@ -9991,7 +10013,7 @@ async function load11(config3) {
|
|||
inputSize7 = 256;
|
||||
return model10;
|
||||
}
|
||||
async function parseSinglePose(res, config3, image24, inputBox) {
|
||||
async function parseSinglePose(res, config3, image25, inputBox) {
|
||||
const kpt4 = res[0][0];
|
||||
const keypoints = [];
|
||||
let score = 0;
|
||||
|
@ -10007,15 +10029,15 @@ async function parseSinglePose(res, config3, image24, inputBox) {
|
|||
part: kpt3[id],
|
||||
positionRaw,
|
||||
position: [
|
||||
Math.round((image24.shape[2] || 0) * positionRaw[0]),
|
||||
Math.round((image24.shape[1] || 0) * positionRaw[1])
|
||||
Math.round((image25.shape[2] || 0) * positionRaw[0]),
|
||||
Math.round((image25.shape[1] || 0) * positionRaw[1])
|
||||
]
|
||||
});
|
||||
}
|
||||
}
|
||||
score = keypoints.reduce((prev, curr) => curr.score > prev ? curr.score : prev, 0);
|
||||
const bodies = [];
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
|
||||
const annotations2 = {};
|
||||
for (const [name, indexes] of Object.entries(connected3)) {
|
||||
const pt = [];
|
||||
|
@ -10032,7 +10054,7 @@ async function parseSinglePose(res, config3, image24, inputBox) {
|
|||
bodies.push(body4);
|
||||
return bodies;
|
||||
}
|
||||
async function parseMultiPose(res, config3, image24, inputBox) {
|
||||
async function parseMultiPose(res, config3, image25, inputBox) {
|
||||
const bodies = [];
|
||||
for (let id = 0; id < res[0].length; id++) {
|
||||
const kpt4 = res[0][id];
|
||||
|
@ -10050,11 +10072,11 @@ async function parseMultiPose(res, config3, image24, inputBox) {
|
|||
part: kpt3[i],
|
||||
score: Math.round(100 * score) / 100,
|
||||
positionRaw,
|
||||
position: [Math.round((image24.shape[2] || 0) * positionRaw[0]), Math.round((image24.shape[1] || 0) * positionRaw[1])]
|
||||
position: [Math.round((image25.shape[2] || 0) * positionRaw[0]), Math.round((image25.shape[1] || 0) * positionRaw[1])]
|
||||
});
|
||||
}
|
||||
}
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image24.shape[2], image24.shape[1]]);
|
||||
const newBox = calc(keypoints.map((pt) => pt.position), [image25.shape[2], image25.shape[1]]);
|
||||
const annotations2 = {};
|
||||
for (const [name, indexes] of Object.entries(connected3)) {
|
||||
const pt = [];
|
||||
|
@ -10187,7 +10209,7 @@ async function process4(res, inputSize8, outputShape, config3) {
|
|||
results = results.filter((_val, idx) => nmsIdx.includes(idx)).sort((a, b) => b.score - a.score);
|
||||
return results;
|
||||
}
|
||||
async function predict11(image24, config3) {
|
||||
async function predict11(image25, config3) {
|
||||
const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
|
||||
const skipFrame = skipped11 < (config3.object.skipFrames || 0);
|
||||
if (config3.skipAllowed && skipTime && skipFrame && last5.length > 0) {
|
||||
|
@ -10198,8 +10220,8 @@ async function predict11(image24, config3) {
|
|||
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
|
||||
return last5;
|
||||
return new Promise(async (resolve) => {
|
||||
const outputSize2 = [image24.shape[2], image24.shape[1]];
|
||||
const resize = tf22.image.resizeBilinear(image24, [model11.inputSize, model11.inputSize], false);
|
||||
const outputSize2 = [image25.shape[2], image25.shape[1]];
|
||||
const resize = tf22.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false);
|
||||
const norm = tf22.div(resize, 255);
|
||||
const transpose = norm.transpose([0, 3, 1, 2]);
|
||||
tf22.dispose(norm);
|
||||
|
@ -12924,6 +12946,9 @@ var Human = class {
|
|||
enhance(input) {
|
||||
return enhance2(input);
|
||||
}
|
||||
compare(firstImageTensor, secondImageTensor) {
|
||||
return compare(this.config, firstImageTensor, secondImageTensor);
|
||||
}
|
||||
async init() {
|
||||
await check(this, true);
|
||||
await this.tf.ready();
|
||||
|
|
|
@ -184,7 +184,7 @@ export interface GestureConfig {
|
|||
* Contains all configurable parameters
|
||||
* @typedef Config
|
||||
*
|
||||
* Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L292)
|
||||
* Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L262)
|
||||
*/
|
||||
export interface Config {
|
||||
/** Backend used for TFJS operations
|
||||
|
@ -196,16 +196,19 @@ export interface Config {
|
|||
backend: '' | 'cpu' | 'wasm' | 'webgl' | 'humangl' | 'tensorflow' | 'webgpu',
|
||||
|
||||
/** Path to *.wasm files if backend is set to `wasm`
|
||||
*
|
||||
* default: auto-detects to link to CDN `jsdelivr` when running in browser
|
||||
*/
|
||||
wasmPath: string,
|
||||
|
||||
/** Print debug statements to console
|
||||
*
|
||||
* default: `true`
|
||||
*/
|
||||
debug: boolean,
|
||||
|
||||
/** Perform model loading and inference concurrently or sequentially
|
||||
*
|
||||
* default: `true`
|
||||
*/
|
||||
async: boolean,
|
||||
|
@ -213,6 +216,7 @@ export interface Config {
|
|||
/** What to use for `human.warmup()`
|
||||
* - warmup pre-initializes all models for faster inference but can take significant time on startup
|
||||
* - used by `webgl`, `humangl` and `webgpu` backends
|
||||
*
|
||||
* default: `full`
|
||||
*/
|
||||
warmup: 'none' | 'face' | 'full' | 'body',
|
||||
|
@ -220,6 +224,7 @@ export interface Config {
|
|||
|
||||
/** Base model path (typically starting with file://, http:// or https://) for all models
|
||||
* - individual modelPath values are relative to this path
|
||||
*
|
||||
* default: `../models/` for browsers and `file://models/` for nodejs
|
||||
*/
|
||||
modelBasePath: string,
|
||||
|
@ -227,6 +232,7 @@ export interface Config {
|
|||
/** Cache sensitivity
|
||||
* - values 0..1 where 0.01 means reset cache if input changed more than 1%
|
||||
* - set to 0 to disable caching
|
||||
*
|
||||
* default: 0.7
|
||||
*/
|
||||
cacheSensitivity: number;
|
||||
|
@ -259,7 +265,7 @@ export interface Config {
|
|||
segmentation: Partial<SegmentationConfig>,
|
||||
}
|
||||
|
||||
/** - [See all default Config values...](https://github.com/vladmandic/human/blob/main/src/config.ts#L253) */
|
||||
/** - [See all default Config values...](https://github.com/vladmandic/human/blob/main/src/config.ts#L262) */
|
||||
const config: Config = {
|
||||
backend: '',
|
||||
modelBasePath: '',
|
||||
|
|
14
src/human.ts
14
src/human.ts
|
@ -68,7 +68,7 @@ export class Human {
|
|||
version: string;
|
||||
|
||||
/** Current configuration
|
||||
* - Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L250)
|
||||
* - Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L262)
|
||||
*/
|
||||
config: Config;
|
||||
|
||||
|
@ -267,6 +267,18 @@ export class Human {
|
|||
return faceres.enhance(input);
|
||||
}
|
||||
|
||||
/** Compare two input tensors for pixel simmilarity
|
||||
* - use `human.image` to process any valid input and get a tensor that can be used for compare
|
||||
* - when passing manually generated tensors:
|
||||
* - both input tensors must be in format [1, height, width, 3]
|
||||
* - if resolution of tensors does not match, second tensor will be resized to match resolution of the first tensor
|
||||
* @returns {number}
|
||||
* - return value is pixel similarity score normalized by input resolution and rgb channels
|
||||
*/
|
||||
compare(firstImageTensor: Tensor, secondImageTensor: Tensor): Promise<number> {
|
||||
return image.compare(this.config, firstImageTensor, secondImageTensor);
|
||||
}
|
||||
|
||||
/** Explicit backend initialization
|
||||
* - Normally done implicitly during initial load phase
|
||||
* - Call to explictly register and initialize TFJS backend without any other operations
|
||||
|
|
|
@ -272,3 +272,24 @@ export async function skip(config, input: Tensor) {
|
|||
}
|
||||
return skipFrame;
|
||||
}
|
||||
|
||||
export async function compare(config, input1: Tensor, input2: Tensor): Promise<number> {
|
||||
const t: Record<string, Tensor> = {};
|
||||
if (!input1 || !input2 || input1.shape.length !== 4 || input1.shape.length !== input2.shape.length) {
|
||||
if (!config.debug) log('invalid input tensor or tensor shapes do not match:', input1.shape, input2.shape);
|
||||
return 0;
|
||||
}
|
||||
if (input1.shape[0] !== 1 || input2.shape[0] !== 1 || input1.shape[3] !== 3 || input2.shape[3] !== 3) {
|
||||
if (!config.debug) log('input tensors must be of shape [1, height, width, 3]:', input1.shape, input2.shape);
|
||||
return 0;
|
||||
}
|
||||
t.input1 = tf.clone(input1);
|
||||
t.input2 = (input1.shape[1] !== input2.shape[1] || input1.shape[2] !== input2.shape[2]) ? tf.image.resizeBilinear(input2, [input1.shape[1], input1.shape[2]]) : tf.clone(input2);
|
||||
t.diff = tf.sub(t.input1, t.input2);
|
||||
t.squared = tf.mul(t.diff, t.diff);
|
||||
t.sum = tf.sum(t.squared);
|
||||
const diffSum = await t.sum.data();
|
||||
const diffRelative = diffSum[0] / (input1.shape[1] || 1) / (input1.shape[2] || 1) / 255 / 3;
|
||||
tf.dispose([t.input1, t.input2, t.diff, t.squared, t.sum]);
|
||||
return diffRelative;
|
||||
}
|
||||
|
|
|
@ -6,11 +6,10 @@ Not required for normal funcioning of library
|
|||
|
||||
### NodeJS using TensorFlow library
|
||||
|
||||
- Image filters are disabled due to lack of Canvas and WeBGL access
|
||||
- Image filters are disabled due to lack of Canvas and WebGL access
|
||||
- Face rotation is disabled for `NodeJS` platform:
|
||||
`Kernel 'RotateWithOffset' not registered for backend 'tensorflow'`
|
||||
<https://github.com/tensorflow/tfjs/issues/4606>
|
||||
Work has recently been completed and will likely be included in TFJS 3.9.0
|
||||
|
||||
### NodeJS using WASM
|
||||
|
||||
|
@ -18,27 +17,11 @@ Not required for normal funcioning of library
|
|||
See <https://github.com/tensorflow/tfjs/issues/4927>
|
||||
- Image filters are disabled due to lack of Canvas and WeBGL access
|
||||
- Only supported input is Tensor due to missing image decoders
|
||||
- Warmup returns null and is marked as failed
|
||||
Missing image decode in `tfjs-core`
|
||||
- Fails on object detection:
|
||||
`Kernel 'SparseToDense' not registered for backend 'wasm'`
|
||||
<https://github.com/tensorflow/tfjs/issues/4824>
|
||||
|
||||
<br>
|
||||
|
||||
## Manual Tests
|
||||
## Browser Tests
|
||||
|
||||
### Browser using WebGL backend
|
||||
|
||||
- Chrome/Edge: All Passing
|
||||
- Firefox: WebWorkers not supported due to missing support for OffscreenCanvas
|
||||
- Safari: Limited Testing
|
||||
|
||||
### Browser using WASM backend
|
||||
|
||||
- Chrome/Edge: All Passing
|
||||
- Firefox: WebWorkers not supported due to missing support for OffscreenCanvas
|
||||
- Safari: Limited Testing
|
||||
- Fails on object detection:
|
||||
`Kernel 'SparseToDense' not registered for backend 'wasm'`
|
||||
<https://github.com/tensorflow/tfjs/issues/4824>
|
||||
- Chrome/Edge: **All Passing**
|
||||
- Firefox: WebWorkers not supported due to missing support for `OffscreenCanvas`
|
||||
- Safari: **Limited Testing**
|
||||
|
|
|
@ -1,25 +1,25 @@
|
|||
2021-11-06 10:12:23 [36mINFO: [39m @vladmandic/human version 2.5.0
|
||||
2021-11-06 10:12:23 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v17.0.1
|
||||
2021-11-06 10:12:23 [36mINFO: [39m Application: {"name":"@vladmandic/human","version":"2.5.0"}
|
||||
2021-11-06 10:12:23 [36mINFO: [39m Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
|
||||
2021-11-06 10:12:23 [36mINFO: [39m Toolchain: {"build":"0.6.3","esbuild":"0.13.12","typescript":"4.4.4","typedoc":"0.22.7","eslint":"8.2.0"}
|
||||
2021-11-06 10:12:23 [36mINFO: [39m Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
|
||||
2021-11-06 10:12:23 [35mSTATE:[39m Clean: {"locations":["dist/*","types/*","typedoc/*"]}
|
||||
2021-11-06 10:12:23 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
|
||||
2021-11-06 10:12:23 [35mSTATE:[39m Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":56,"inputBytes":520786,"outputBytes":440096}
|
||||
2021-11-06 10:12:23 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
|
||||
2021-11-06 10:12:23 [35mSTATE:[39m Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":56,"inputBytes":520794,"outputBytes":440100}
|
||||
2021-11-06 10:12:23 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
|
||||
2021-11-06 10:12:23 [35mSTATE:[39m Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":56,"inputBytes":520861,"outputBytes":440172}
|
||||
2021-11-06 10:12:23 [35mSTATE:[39m Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
|
||||
2021-11-06 10:12:23 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2326,"outputBytes":912}
|
||||
2021-11-06 10:12:23 [35mSTATE:[39m Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":56,"inputBytes":520423,"outputBytes":441983}
|
||||
2021-11-06 10:12:23 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2562175,"outputBytes":2497378}
|
||||
2021-11-06 10:12:24 [35mSTATE:[39m Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":56,"inputBytes":3016889,"outputBytes":3096312}
|
||||
2021-11-06 10:12:24 [35mSTATE:[39m Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":56,"inputBytes":3016889,"outputBytes":2945011}
|
||||
2021-11-06 10:12:43 [35mSTATE:[39m Typings: {"input":"src/human.ts","output":"types","files":49}
|
||||
2021-11-06 10:12:49 [35mSTATE:[39m TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":48,"generated":true}
|
||||
2021-11-06 10:12:49 [35mSTATE:[39m Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5851,"outputBytes":3818}
|
||||
2021-11-06 10:13:25 [35mSTATE:[39m Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":90,"errors":0,"warnings":0}
|
||||
2021-11-06 10:13:26 [35mSTATE:[39m ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
|
||||
2021-11-06 10:13:26 [36mINFO: [39m Done...
|
||||
2021-11-07 09:49:34 [36mINFO: [39m @vladmandic/human version 2.5.0
|
||||
2021-11-07 09:49:34 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v17.0.1
|
||||
2021-11-07 09:49:34 [36mINFO: [39m Application: {"name":"@vladmandic/human","version":"2.5.0"}
|
||||
2021-11-07 09:49:34 [36mINFO: [39m Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
|
||||
2021-11-07 09:49:34 [36mINFO: [39m Toolchain: {"build":"0.6.3","esbuild":"0.13.12","typescript":"4.4.4","typedoc":"0.22.7","eslint":"8.2.0"}
|
||||
2021-11-07 09:49:34 [36mINFO: [39m Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
|
||||
2021-11-07 09:49:34 [35mSTATE:[39m Clean: {"locations":["dist/*","types/*","typedoc/*"]}
|
||||
2021-11-07 09:49:34 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
|
||||
2021-11-07 09:49:34 [35mSTATE:[39m Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":56,"inputBytes":522624,"outputBytes":441314}
|
||||
2021-11-07 09:49:34 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
|
||||
2021-11-07 09:49:34 [35mSTATE:[39m Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":56,"inputBytes":522632,"outputBytes":441318}
|
||||
2021-11-07 09:49:34 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
|
||||
2021-11-07 09:49:34 [35mSTATE:[39m Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":56,"inputBytes":522699,"outputBytes":441390}
|
||||
2021-11-07 09:49:34 [35mSTATE:[39m Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
|
||||
2021-11-07 09:49:34 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2326,"outputBytes":912}
|
||||
2021-11-07 09:49:34 [35mSTATE:[39m Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":56,"inputBytes":522261,"outputBytes":443292}
|
||||
2021-11-07 09:49:35 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2562175,"outputBytes":2497378}
|
||||
2021-11-07 09:49:35 [35mSTATE:[39m Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":56,"inputBytes":3018727,"outputBytes":3097554}
|
||||
2021-11-07 09:49:35 [35mSTATE:[39m Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":56,"inputBytes":3018727,"outputBytes":2946203}
|
||||
2021-11-07 09:49:55 [35mSTATE:[39m Typings: {"input":"src/human.ts","output":"types","files":49}
|
||||
2021-11-07 09:50:02 [35mSTATE:[39m TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":48,"generated":true}
|
||||
2021-11-07 09:50:02 [35mSTATE:[39m Compile: {"name":"demo/browser","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5851,"outputBytes":3818}
|
||||
2021-11-07 09:50:38 [35mSTATE:[39m Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":90,"errors":0,"warnings":0}
|
||||
2021-11-07 09:50:39 [35mSTATE:[39m ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
|
||||
2021-11-07 09:50:39 [36mINFO: [39m Done...
|
||||
|
|
|
@ -135,12 +135,55 @@ async function testDetect(human, input, title, checkLeak = true) {
|
|||
}
|
||||
return detect;
|
||||
}
|
||||
|
||||
const evt = { image: 0, detect: 0, warmup: 0 };
|
||||
async function events(event) {
|
||||
log('state', 'event:', event);
|
||||
evt[event]++;
|
||||
}
|
||||
|
||||
const verify = (state, ...messages) => {
|
||||
if (state) log('state', 'passed:', ...messages);
|
||||
else log('error', 'failed:', ...messages);
|
||||
};
|
||||
|
||||
async function verifyDetails(human) {
|
||||
const res = await testDetect(human, 'samples/in/ai-body.jpg', 'default');
|
||||
verify(res.face.length === 1, 'details face length', res.face.length);
|
||||
for (const face of res.face) {
|
||||
verify(face.score > 0.9 && face.boxScore > 0.9 && face.faceScore > 0.9, 'details face score', face.score, face.boxScore, face.faceScore);
|
||||
verify(face.age > 29 && face.age < 30 && face.gender === 'female' && face.genderScore > 0.9 && face.iris > 70 && face.iris < 80, 'details face age/gender', face.age, face.gender, face.genderScore, face.iris);
|
||||
verify(face.box.length === 4 && face.boxRaw.length === 4 && face.mesh.length === 478 && face.meshRaw.length === 478 && face.embedding.length === 1024, 'details face arrays', face.box.length, face.mesh.length, face.embedding.length);
|
||||
verify(face.emotion.length === 3 && face.emotion[0].score > 0.5 && face.emotion[0].emotion === 'angry', 'details face emotion', face.emotion.length, face.emotion[0]);
|
||||
}
|
||||
verify(res.body.length === 1, 'details body length', res.body.length);
|
||||
for (const body of res.body) {
|
||||
verify(body.score > 0.9 && body.box.length === 4 && body.boxRaw.length === 4 && body.keypoints.length === 17 && Object.keys(body.annotations).length === 6, 'details body', body.score, body.keypoints.length, Object.keys(body.annotations).length);
|
||||
}
|
||||
verify(res.hand.length === 1, 'details hand length', res.hand.length);
|
||||
for (const hand of res.hand) {
|
||||
verify(hand.score > 0.5 && hand.boxScore > 0.5 && hand.fingerScore > 0.5 && hand.box.length === 4 && hand.boxRaw.length === 4 && hand.label === 'point', 'details hand', hand.boxScore, hand.fingerScore, hand.label);
|
||||
verify(hand.keypoints.length === 21 && Object.keys(hand.landmarks).length === 5 && Object.keys(hand.annotations).length === 6, 'details hand arrays', hand.keypoints.length, Object.keys(hand.landmarks).length, Object.keys(hand.annotations).length);
|
||||
}
|
||||
verify(res.gesture.length === 5, 'details gesture length', res.gesture.length);
|
||||
verify(res.gesture[0].gesture === 'facing right', 'details gesture first', res.gesture[0]);
|
||||
verify(res.object.length === 1, 'details object length', res.object.length);
|
||||
for (const obj of res.object) {
|
||||
verify(obj.score > 0.7 && obj.label === 'person' && obj.box.length === 4 && obj.boxRaw.length === 4, 'details object', obj.score, obj.label);
|
||||
}
|
||||
}
|
||||
|
||||
async function verifyCompare(human) {
|
||||
const t1 = await getImage(human, 'samples/in/ai-face.jpg');
|
||||
const t2 = await getImage(human, 'samples/in/ai-body.jpg');
|
||||
const n1 = await human.compare(t1, t1);
|
||||
const n2 = await human.compare(t1, t2);
|
||||
const n3 = await human.compare(t2, t1);
|
||||
const n4 = await human.compare(t2, t2);
|
||||
verify(n1 === 0 && n4 === 0 && Math.round(n2) === Math.round(n3) && n2 > 20 && n2 < 30, 'image compare', n1, n2);
|
||||
human.tf.dispose([t1, t2]);
|
||||
}
|
||||
|
||||
async function test(Human, inputConfig) {
|
||||
config = inputConfig;
|
||||
fetch = (await import('node-fetch')).default;
|
||||
|
@ -202,6 +245,8 @@ async function test(Human, inputConfig) {
|
|||
gestures: res.gesture,
|
||||
});
|
||||
|
||||
await verifyDetails(human);
|
||||
|
||||
// test default config async
|
||||
log('info', 'test default');
|
||||
human.reset();
|
||||
|
@ -357,6 +402,9 @@ async function test(Human, inputConfig) {
|
|||
if (tensors1 === tensors2 && tensors1 === tensors3 && tensors2 === tensors3) log('state', 'passeed: equal usage');
|
||||
else log('error', 'failed: equal usage', tensors1, tensors2, tensors3);
|
||||
|
||||
// validate cache compare algorithm
|
||||
await verifyCompare(human);
|
||||
|
||||
// tests end
|
||||
const t1 = process.hrtime.bigint();
|
||||
|
||||
|
|
1093
test/test.log
1093
test/test.log
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -75,7 +75,7 @@
|
|||
<li>Can be TFJS that is bundled with <code>Human</code> or a manually imported TFJS library</li>
|
||||
</ul>
|
||||
</div><dl class="tsd-comment-tags"><dt>external</dt><dd><p><a href="https://js.tensorflow.org/api/latest/">API</a></p>
|
||||
</dd></dl></div></section></section><section class="tsd-panel-group tsd-member-group "><h2>Variables</h2><section class="tsd-panel tsd-member tsd-kind-variable"><a id="defaults" class="tsd-anchor"></a><h3>defaults</h3><div class="tsd-signature tsd-kind-icon">defaults<span class="tsd-signature-symbol">:</span> <a href="interfaces/Config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> = ...</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L263">src/config.ts:263</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</dd></dl></div></section></section><section class="tsd-panel-group tsd-member-group "><h2>Variables</h2><section class="tsd-panel tsd-member tsd-kind-variable"><a id="defaults" class="tsd-anchor"></a><h3>defaults</h3><div class="tsd-signature tsd-kind-icon">defaults<span class="tsd-signature-symbol">:</span> <a href="interfaces/Config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> = ...</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L269">src/config.ts:269</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<ul>
|
||||
<li><a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L253">See all default Config values...</a></li>
|
||||
</ul>
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
<!DOCTYPE html><html class="default no-js"><head><meta charSet="utf-8"/><meta http-equiv="x-ua-compatible" content="IE=edge"/><title>Config | @vladmandic/human - v2.5.0</title><meta name="description" content="Documentation for @vladmandic/human - v2.5.0"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="stylesheet" href="../assets/style.css"/><link rel="stylesheet" href="../assets/highlight.css"/><script async src="../assets/search.js" id="search-script"></script></head><body><script>document.body.classList.add(localStorage.getItem("tsd-theme") || "os")</script><header><div class="tsd-page-toolbar"><div class="container"><div class="table-wrap"><div class="table-cell" id="tsd-search" data-base=".."><div class="field"><label for="tsd-search-field" class="tsd-widget search no-caption">Search</label><input type="text" id="tsd-search-field"/></div><ul class="results"><li class="state loading">Preparing search index...</li><li class="state failure">The search index is not available</li></ul><a href="../index.html" class="title">@vladmandic/human - v2.5.0</a></div><div class="table-cell" id="tsd-widgets"><div id="tsd-filter"><a href="#" class="tsd-widget options no-caption" data-toggle="options">Options</a><div class="tsd-filter-group"><div class="tsd-select" id="tsd-filter-visibility"><span class="tsd-select-label">All</span><ul class="tsd-select-list"><li data-value="public">Public</li><li data-value="protected">Public/Protected</li><li data-value="private" class="selected">All</li></ul></div> <input type="checkbox" id="tsd-filter-inherited" checked/><label class="tsd-widget" for="tsd-filter-inherited">Inherited</label></div></div><a href="#" class="tsd-widget menu no-caption" data-toggle="menu">Menu</a></div></div></div></div><div class="tsd-page-title"><div class="container"><ul class="tsd-breadcrumb"><li><a href="../index.html">@vladmandic/human - v2.5.0</a></li><li><a href="Config.html">Config</a></li></ul><h1>Interface 
Config</h1></div></div></header><div class="container container-main"><div class="row"><div class="col-8 col-content"><section class="tsd-panel tsd-comment"><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Configuration interface definition for <strong>Human</strong> library</p>
|
||||
</div><div><p>Contains all configurable parameters</p>
|
||||
</div></div></section><section class="tsd-panel tsd-hierarchy"><h3>Hierarchy</h3><ul class="tsd-hierarchy"><li><span class="target">Config</span></li></ul></section><section class="tsd-panel-group tsd-index-group"><h2>Index</h2><section class="tsd-panel tsd-index-panel"><div class="tsd-index-content"><section class="tsd-index-section "><h3>Properties</h3><ul class="tsd-index-list"><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#async" class="tsd-kind-icon">async</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#backend" class="tsd-kind-icon">backend</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#body" class="tsd-kind-icon">body</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#cacheSensitivity" class="tsd-kind-icon">cache<wbr/>Sensitivity</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#deallocate" class="tsd-kind-icon">deallocate</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#debug" class="tsd-kind-icon">debug</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#face" class="tsd-kind-icon">face</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#filter" class="tsd-kind-icon">filter</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#gesture" class="tsd-kind-icon">gesture</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#hand" class="tsd-kind-icon">hand</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#modelBasePath" class="tsd-kind-icon">model<wbr/>Base<wbr/>Path</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#object" class="tsd-kind-icon">object</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#segmentation" 
class="tsd-kind-icon">segmentation</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#skipAllowed" class="tsd-kind-icon">skip<wbr/>Allowed</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#warmup" class="tsd-kind-icon">warmup</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#wasmPath" class="tsd-kind-icon">wasm<wbr/>Path</a></li></ul></section></div></section></section><section class="tsd-panel-group tsd-member-group "><h2>Properties</h2><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="async" class="tsd-anchor"></a><h3>async</h3><div class="tsd-signature tsd-kind-icon">async<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L211">src/config.ts:211</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Perform model loading and inference concurrently or sequentially
|
||||
default: <code>true</code></p>
|
||||
</div></div></section><section class="tsd-panel tsd-hierarchy"><h3>Hierarchy</h3><ul class="tsd-hierarchy"><li><span class="target">Config</span></li></ul></section><section class="tsd-panel-group tsd-index-group"><h2>Index</h2><section class="tsd-panel tsd-index-panel"><div class="tsd-index-content"><section class="tsd-index-section "><h3>Properties</h3><ul class="tsd-index-list"><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#async" class="tsd-kind-icon">async</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#backend" class="tsd-kind-icon">backend</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#body" class="tsd-kind-icon">body</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#cacheSensitivity" class="tsd-kind-icon">cache<wbr/>Sensitivity</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#deallocate" class="tsd-kind-icon">deallocate</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#debug" class="tsd-kind-icon">debug</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#face" class="tsd-kind-icon">face</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#filter" class="tsd-kind-icon">filter</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#gesture" class="tsd-kind-icon">gesture</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#hand" class="tsd-kind-icon">hand</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#modelBasePath" class="tsd-kind-icon">model<wbr/>Base<wbr/>Path</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#object" class="tsd-kind-icon">object</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#segmentation" 
class="tsd-kind-icon">segmentation</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#skipAllowed" class="tsd-kind-icon">skip<wbr/>Allowed</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#warmup" class="tsd-kind-icon">warmup</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#wasmPath" class="tsd-kind-icon">wasm<wbr/>Path</a></li></ul></section></div></section></section><section class="tsd-panel-group tsd-member-group "><h2>Properties</h2><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="async" class="tsd-anchor"></a><h3>async</h3><div class="tsd-signature tsd-kind-icon">async<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L214">src/config.ts:214</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Perform model loading and inference concurrently or sequentially</p>
|
||||
</div><div><p>default: <code>true</code></p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="backend" class="tsd-anchor"></a><h3>backend</h3><div class="tsd-signature tsd-kind-icon">backend<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">""</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">"cpu"</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">"wasm"</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">"webgl"</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">"humangl"</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">"tensorflow"</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">"webgpu"</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L196">src/config.ts:196</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Backend used for TFJS operations
|
||||
valid build-in backends are:</p>
|
||||
|
@ -12,48 +12,48 @@ valid build-in backends are:</p>
|
|||
<li>NodeJS: <code>cpu</code>, <code>wasm</code>, <code>tensorflow</code>
|
||||
default: <code>humangl</code> for browser and <code>tensorflow</code> for nodejs</li>
|
||||
</ul>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="body" class="tsd-anchor"></a><h3>body</h3><div class="tsd-signature tsd-kind-icon">body<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol"><</span><a href="BodyConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">BodyConfig</a><span class="tsd-signature-symbol">></span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L250">src/config.ts:250</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="body" class="tsd-anchor"></a><h3>body</h3><div class="tsd-signature tsd-kind-icon">body<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol"><</span><a href="BodyConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">BodyConfig</a><span class="tsd-signature-symbol">></span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L256">src/config.ts:256</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p><a href="BodyConfig.html">BodyConfig</a></p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="cacheSensitivity" class="tsd-anchor"></a><h3>cache<wbr/>Sensitivity</h3><div class="tsd-signature tsd-kind-icon">cache<wbr/>Sensitivity<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L232">src/config.ts:232</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="cacheSensitivity" class="tsd-anchor"></a><h3>cache<wbr/>Sensitivity</h3><div class="tsd-signature tsd-kind-icon">cache<wbr/>Sensitivity<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L238">src/config.ts:238</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Cache sensitivity</p>
|
||||
<ul>
|
||||
<li>values 0..1 where 0.01 means reset cache if input changed more than 1%</li>
|
||||
<li>set to 0 to disable caching
|
||||
default: 0.7</li>
|
||||
<li>set to 0 to disable caching</li>
|
||||
</ul>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="deallocate" class="tsd-anchor"></a><h3>deallocate</h3><div class="tsd-signature tsd-kind-icon">deallocate<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L235">src/config.ts:235</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</div><div><p>default: 0.7</p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="deallocate" class="tsd-anchor"></a><h3>deallocate</h3><div class="tsd-signature tsd-kind-icon">deallocate<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L241">src/config.ts:241</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Perform immediate garbage collection on deallocated tensors instead of caching them</p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="debug" class="tsd-anchor"></a><h3>debug</h3><div class="tsd-signature tsd-kind-icon">debug<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L206">src/config.ts:206</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Print debug statements to console
|
||||
default: <code>true</code></p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="face" class="tsd-anchor"></a><h3>face</h3><div class="tsd-signature tsd-kind-icon">face<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol"><</span><a href="FaceConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">FaceConfig</a><span class="tsd-signature-symbol">></span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L247">src/config.ts:247</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="debug" class="tsd-anchor"></a><h3>debug</h3><div class="tsd-signature tsd-kind-icon">debug<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L208">src/config.ts:208</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Print debug statements to console</p>
|
||||
</div><div><p>default: <code>true</code></p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="face" class="tsd-anchor"></a><h3>face</h3><div class="tsd-signature tsd-kind-icon">face<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol"><</span><a href="FaceConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">FaceConfig</a><span class="tsd-signature-symbol">></span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L253">src/config.ts:253</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p><a href="FaceConfig.html">FaceConfig</a></p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="filter" class="tsd-anchor"></a><h3>filter</h3><div class="tsd-signature tsd-kind-icon">filter<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol"><</span><a href="FilterConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">FilterConfig</a><span class="tsd-signature-symbol">></span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L241">src/config.ts:241</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="filter" class="tsd-anchor"></a><h3>filter</h3><div class="tsd-signature tsd-kind-icon">filter<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol"><</span><a href="FilterConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">FilterConfig</a><span class="tsd-signature-symbol">></span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L247">src/config.ts:247</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p><a href="FilterConfig.html">FilterConfig</a></p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="gesture" class="tsd-anchor"></a><h3>gesture</h3><div class="tsd-signature tsd-kind-icon">gesture<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol"><</span><a href="GestureConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">GestureConfig</a><span class="tsd-signature-symbol">></span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L244">src/config.ts:244</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="gesture" class="tsd-anchor"></a><h3>gesture</h3><div class="tsd-signature tsd-kind-icon">gesture<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol"><</span><a href="GestureConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">GestureConfig</a><span class="tsd-signature-symbol">></span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L250">src/config.ts:250</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p><a href="GestureConfig.html">GestureConfig</a></p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="hand" class="tsd-anchor"></a><h3>hand</h3><div class="tsd-signature tsd-kind-icon">hand<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol"><</span><a href="HandConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">HandConfig</a><span class="tsd-signature-symbol">></span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L253">src/config.ts:253</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="hand" class="tsd-anchor"></a><h3>hand</h3><div class="tsd-signature tsd-kind-icon">hand<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol"><</span><a href="HandConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">HandConfig</a><span class="tsd-signature-symbol">></span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L259">src/config.ts:259</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p><a href="HandConfig.html">HandConfig</a></p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="modelBasePath" class="tsd-anchor"></a><h3>model<wbr/>Base<wbr/>Path</h3><div class="tsd-signature tsd-kind-icon">model<wbr/>Base<wbr/>Path<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">string</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L225">src/config.ts:225</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="modelBasePath" class="tsd-anchor"></a><h3>model<wbr/>Base<wbr/>Path</h3><div class="tsd-signature tsd-kind-icon">model<wbr/>Base<wbr/>Path<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">string</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L230">src/config.ts:230</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Base model path (typically starting with file://, http:// or https://) for all models</p>
|
||||
<ul>
|
||||
<li>individual modelPath values are relative to this path
|
||||
default: <code>../models/</code> for browsers and <code>file://models/</code> for nodejs</li>
|
||||
<li>individual modelPath values are relative to this path</li>
|
||||
</ul>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="object" class="tsd-anchor"></a><h3>object</h3><div class="tsd-signature tsd-kind-icon">object<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol"><</span><a href="ObjectConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">ObjectConfig</a><span class="tsd-signature-symbol">></span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L256">src/config.ts:256</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</div><div><p>default: <code>../models/</code> for browsers and <code>file://models/</code> for nodejs</p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="object" class="tsd-anchor"></a><h3>object</h3><div class="tsd-signature tsd-kind-icon">object<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol"><</span><a href="ObjectConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">ObjectConfig</a><span class="tsd-signature-symbol">></span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L262">src/config.ts:262</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p><a href="ObjectConfig.html">ObjectConfig</a></p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="segmentation" class="tsd-anchor"></a><h3>segmentation</h3><div class="tsd-signature tsd-kind-icon">segmentation<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol"><</span><a href="SegmentationConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">SegmentationConfig</a><span class="tsd-signature-symbol">></span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L259">src/config.ts:259</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="segmentation" class="tsd-anchor"></a><h3>segmentation</h3><div class="tsd-signature tsd-kind-icon">segmentation<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Partial</span><span class="tsd-signature-symbol"><</span><a href="SegmentationConfig.html" class="tsd-signature-type" data-tsd-kind="Interface">SegmentationConfig</a><span class="tsd-signature-symbol">></span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L265">src/config.ts:265</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p><a href="SegmentationConfig.html">SegmentationConfig</a></p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="skipAllowed" class="tsd-anchor"></a><h3>skip<wbr/>Allowed</h3><div class="tsd-signature tsd-kind-icon">skip<wbr/>Allowed<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L238">src/config.ts:238</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="skipAllowed" class="tsd-anchor"></a><h3>skip<wbr/>Allowed</h3><div class="tsd-signature tsd-kind-icon">skip<wbr/>Allowed<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L244">src/config.ts:244</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Internal Variable</p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="warmup" class="tsd-anchor"></a><h3>warmup</h3><div class="tsd-signature tsd-kind-icon">warmup<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">"face"</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">"body"</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">"none"</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">"full"</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L218">src/config.ts:218</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="warmup" class="tsd-anchor"></a><h3>warmup</h3><div class="tsd-signature tsd-kind-icon">warmup<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">"face"</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">"body"</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">"none"</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">"full"</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L222">src/config.ts:222</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>What to use for <code>human.warmup()</code></p>
|
||||
<ul>
|
||||
<li>warmup pre-initializes all models for faster inference but can take significant time on startup</li>
|
||||
<li>used by <code>webgl</code>, <code>humangl</code> and <code>webgpu</code> backends
|
||||
default: <code>full</code></li>
|
||||
<li>used by <code>webgl</code>, <code>humangl</code> and <code>webgpu</code> backends</li>
|
||||
</ul>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="wasmPath" class="tsd-anchor"></a><h3>wasm<wbr/>Path</h3><div class="tsd-signature tsd-kind-icon">wasm<wbr/>Path<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">string</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L201">src/config.ts:201</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Path to *.wasm files if backend is set to <code>wasm</code>
|
||||
default: auto-detects to link to CDN <code>jsdelivr</code> when running in browser</p>
|
||||
</div><div><p>default: <code>full</code></p>
|
||||
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="wasmPath" class="tsd-anchor"></a><h3>wasm<wbr/>Path</h3><div class="tsd-signature tsd-kind-icon">wasm<wbr/>Path<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">string</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/config.ts#L202">src/config.ts:202</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Path to *.wasm files if backend is set to <code>wasm</code></p>
|
||||
</div><div><p>default: auto-detects to link to CDN <code>jsdelivr</code> when running in browser</p>
|
||||
</div></div></section></section></div><div class="col-4 col-menu menu-sticky-wrap menu-highlight"><nav class="tsd-navigation primary"><ul><li class=""><a href="../index.html">Exports</a></li><li class=" tsd-kind-namespace"><a href="../modules/Tensor.html">Tensor</a></li></ul></nav><nav class="tsd-navigation secondary menu-sticky"><ul><li class="current tsd-kind-interface"><a href="Config.html" class="tsd-kind-icon">Config</a><ul><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#async" class="tsd-kind-icon">async</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#backend" class="tsd-kind-icon">backend</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#body" class="tsd-kind-icon">body</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#cacheSensitivity" class="tsd-kind-icon">cache<wbr/>Sensitivity</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#deallocate" class="tsd-kind-icon">deallocate</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#debug" class="tsd-kind-icon">debug</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#face" class="tsd-kind-icon">face</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#filter" class="tsd-kind-icon">filter</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#gesture" class="tsd-kind-icon">gesture</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#hand" class="tsd-kind-icon">hand</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#modelBasePath" class="tsd-kind-icon">model<wbr/>Base<wbr/>Path</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#object" class="tsd-kind-icon">object</a></li><li class="tsd-kind-property 
tsd-parent-kind-interface"><a href="Config.html#segmentation" class="tsd-kind-icon">segmentation</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#skipAllowed" class="tsd-kind-icon">skip<wbr/>Allowed</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#warmup" class="tsd-kind-icon">warmup</a></li><li class="tsd-kind-property tsd-parent-kind-interface"><a href="Config.html#wasmPath" class="tsd-kind-icon">wasm<wbr/>Path</a></li></ul></li></ul></nav></div></div></div><footer class=""><div class="container"><h2>Legend</h2><div class="tsd-legend-group"><ul class="tsd-legend"><li class="tsd-kind-constructor tsd-parent-kind-class"><span class="tsd-kind-icon">Constructor</span></li><li class="tsd-kind-property tsd-parent-kind-class"><span class="tsd-kind-icon">Property</span></li><li class="tsd-kind-method tsd-parent-kind-class"><span class="tsd-kind-icon">Method</span></li></ul><ul class="tsd-legend"><li class="tsd-kind-property tsd-parent-kind-interface"><span class="tsd-kind-icon">Property</span></li></ul></div><h2>Settings</h2><p>Theme <select id="theme"><option value="os">OS</option><option value="light">Light</option><option value="dark">Dark</option></select></p></div></footer><div class="overlay"></div><script src="../assets/main.js"></script></body></html>
|
|
@ -181,31 +181,37 @@ export interface Config {
|
|||
*/
|
||||
backend: '' | 'cpu' | 'wasm' | 'webgl' | 'humangl' | 'tensorflow' | 'webgpu';
|
||||
/** Path to *.wasm files if backend is set to `wasm`
|
||||
*
|
||||
* default: auto-detects to link to CDN `jsdelivr` when running in browser
|
||||
*/
|
||||
wasmPath: string;
|
||||
/** Print debug statements to console
|
||||
*
|
||||
* default: `true`
|
||||
*/
|
||||
debug: boolean;
|
||||
/** Perform model loading and inference concurrently or sequentially
|
||||
*
|
||||
* default: `true`
|
||||
*/
|
||||
async: boolean;
|
||||
/** What to use for `human.warmup()`
|
||||
* - warmup pre-initializes all models for faster inference but can take significant time on startup
|
||||
* - used by `webgl`, `humangl` and `webgpu` backends
|
||||
*
|
||||
* default: `full`
|
||||
*/
|
||||
warmup: 'none' | 'face' | 'full' | 'body';
|
||||
/** Base model path (typically starting with file://, http:// or https://) for all models
|
||||
* - individual modelPath values are relative to this path
|
||||
*
|
||||
* default: `../models/` for browsers and `file://models/` for nodejs
|
||||
*/
|
||||
modelBasePath: string;
|
||||
/** Cache sensitivity
|
||||
* - values 0..1 where 0.01 means reset cache if input changed more than 1%
|
||||
* - set to 0 to disable caching
|
||||
*
|
||||
* default: 0.7
|
||||
*/
|
||||
cacheSensitivity: number;
|
||||
|
|
|
@ -160,6 +160,15 @@ export declare class Human {
|
|||
* @returns Tensor
|
||||
*/
|
||||
enhance(input: Tensor): Tensor | null;
|
||||
/** Compare two input tensors for pixel similarity
|
||||
* - use `human.image` to process any valid input and get a tensor that can be used for compare
|
||||
* - when passing manually generated tensors:
|
||||
* - both input tensors must be in format [1, height, width, 3]
|
||||
* - if resolution of tensors does not match, second tensor will be resized to match resolution of the first tensor
|
||||
* @returns {number}
|
||||
* - return value is pixel similarity score normalized by input resolution and rgb channels
|
||||
*/
|
||||
compare(firstImageTensor: Tensor, secondImageTensor: Tensor): Promise<number>;
|
||||
/** Explicit backend initialization
|
||||
* - Normally done implicitly during initial load phase
|
||||
* - Call to explicitly register and initialize TFJS backend without any other operations
|
||||
|
|
|
@ -9,3 +9,4 @@ export declare function process(input: Input, config: Config, getTensor?: boolea
|
|||
canvas: AnyCanvas | null;
|
||||
}>;
|
||||
export declare function skip(config: any, input: Tensor): Promise<boolean>;
|
||||
export declare function compare(config: any, input1: Tensor, input2: Tensor): Promise<number>;
|
||||
|
|
Loading…
Reference in New Issue