time based caching

pull/233/head
Vladimir Mandic 2021-10-23 09:38:52 -04:00
parent 89ac004612
commit a64d6e9830
62 changed files with 1454 additions and 1351 deletions


@@ -60,6 +60,7 @@ Check out [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) ap
 - [**Usage & Functions**](https://github.com/vladmandic/human/wiki/Usage)
 - [**Configuration Details**](https://github.com/vladmandic/human/wiki/Configuration)
 - [**Output Details**](https://github.com/vladmandic/human/wiki/Outputs)
+- [**Caching & Smoothing**](https://github.com/vladmandic/human/wiki/Caching)
 - [**Face Recognition & Face Description**](https://github.com/vladmandic/human/wiki/Embedding)
 - [**Gesture Recognition**](https://github.com/vladmandic/human/wiki/Gesture)
 - [**Common Issues**](https://github.com/vladmandic/human/wiki/Issues)


@@ -2,16 +2,14 @@
 ## Work in Progress
-- `skipTime`: testing, documentation
 <br>
 ### Exploring
-- Optical Flow: <https://docs.opencv.org/3.3.1/db/d7f/tutorial_js_lucas_kanade.html>
-- TFLite Models: <https://js.tensorflow.org/api_tflite/0.0.1-alpha.4/>
-- Histogram Equalization: Regular, Adaptive, Contrast Limited
 - Switch to custom `tfjs` for main `human` ESM bundle
+- Optical Flow: <https://docs.opencv.org/3.3.1/db/d7f/tutorial_js_lucas_kanade.html>
+- Histogram Equalization: Regular, Adaptive, Contrast Limited
+- TFLite Models: <https://js.tensorflow.org/api_tflite/0.0.1-alpha.4/>
 - Body segmentation: `robust-video-matting`
 #### WebGPU
@@ -55,6 +53,7 @@ Object detection using CenterNet or NanoDet models is not working when using WAS
 ## Pending Release
 - Update to TFJS 3.10.0
+- Time based caching
 - Multiple bug fixes
 - Utility class `human.env`
 - Add `skipTime` in addition to `skipFrames`


@@ -1016,6 +1016,7 @@ async function main() {
   // create instance of human
   human = new Human(userConfig);
+  human.env.perfadd = true;
   log('human version:', human.version);
   // we've merged human defaults with user config and now lets store it back so it can be accessed by methods such as menu

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -115,8 +115,8 @@ var config = {
   debug: true,
   async: true,
   warmup: "full",
-  cacheSensitivity: 0.75,
-  skipFrame: false,
+  cacheSensitivity: 0.7,
+  skipAllowed: false,
   filter: {
     enabled: true,
     width: 0,
@@ -146,8 +146,8 @@ var config = {
     modelPath: "blazeface.json",
     rotation: true,
     maxDetected: 1,
-    skipFrames: 11,
-    skipTime: 2e3,
+    skipFrames: 99,
+    skipTime: 2500,
     minConfidence: 0.2,
     iouThreshold: 0.1,
     return: false
@@ -163,21 +163,21 @@ var config = {
   emotion: {
     enabled: true,
     minConfidence: 0.1,
-    skipFrames: 12,
-    skipTime: 2e3,
+    skipFrames: 99,
+    skipTime: 1500,
     modelPath: "emotion.json"
   },
   description: {
     enabled: true,
     modelPath: "faceres.json",
-    skipFrames: 13,
-    skipTime: 2e3,
+    skipFrames: 99,
+    skipTime: 3e3,
     minConfidence: 0.1
   },
   antispoof: {
     enabled: false,
-    skipFrames: 14,
-    skipTime: 2e3,
+    skipFrames: 99,
+    skipTime: 4e3,
     modelPath: "antispoof.json"
   }
 },
@@ -190,12 +190,12 @@ var config = {
   maxDetected: -1,
   minConfidence: 0.3,
   skipFrames: 1,
-  skipTime: 2e3
+  skipTime: 200
 },
 hand: {
   enabled: true,
   rotation: true,
-  skipFrames: 2,
+  skipFrames: 99,
   skipTime: 2e3,
   minConfidence: 0.5,
   iouThreshold: 0.2,
@@ -214,8 +214,8 @@ var config = {
   minConfidence: 0.2,
   iouThreshold: 0.4,
   maxDetected: 10,
-  skipFrames: 15,
-  skipTime: 2e3
+  skipFrames: 99,
+  skipTime: 1e3
 },
 segmentation: {
   enabled: false,
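The new defaults above pair a high `skipFrames` ceiling with a per-module `skipTime` budget in milliseconds, so caching is effectively governed by elapsed time. A minimal usage sketch (values are illustrative, not the shipped defaults; the `@vladmandic/human` import path is assumed):

```js
import Human from '@vladmandic/human';

// each module re-runs only after its skipTime (ms) or skipFrames budget is exhausted,
// and only while the input is considered similar enough to the previous frame
const human = new Human({
  cacheSensitivity: 0.7, // how different a frame must be before caching is disallowed; 0 disables caching
  face: {
    detector: { skipFrames: 99, skipTime: 2500 },              // refresh face boxes at most every 2.5s
    emotion: { enabled: true, skipFrames: 99, skipTime: 1500 }, // refresh emotion at most every 1.5s
  },
  object: { enabled: true, skipFrames: 99, skipTime: 1000 },
});
```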
@@ -1181,6 +1181,7 @@ var Env = class {
   __publicField(this, "filter");
   __publicField(this, "tfjs");
   __publicField(this, "offscreen");
+  __publicField(this, "perfadd", false);
   __publicField(this, "wasm", {
     supported: void 0,
     backend: void 0,
@@ -1304,7 +1305,7 @@ var model2;
 var cached = [];
 var skipped2 = Number.MAX_SAFE_INTEGER;
 var lastCount = 0;
-var last = 0;
+var lastTime = 0;
 async function load2(config3) {
   var _a, _b;
   if (env.initial)
@@ -1323,7 +1324,9 @@ async function predict(image25, config3, idx, count2) {
   var _a, _b;
   if (!model2)
     return null;
-  if (skipped2 < (((_a = config3.face.antispoof) == null ? void 0 : _a.skipFrames) || 0) && (((_b = config3.face.antispoof) == null ? void 0 : _b.skipTime) || 0) <= now() - last && config3.skipFrame && lastCount === count2 && cached[idx]) {
+  const skipTime = (((_a = config3.face.antispoof) == null ? void 0 : _a.skipTime) || 0) > now() - lastTime;
+  const skipFrame = skipped2 < (((_b = config3.face.antispoof) == null ? void 0 : _b.skipFrames) || 0);
+  if (config3.skipAllowed && skipTime && skipFrame && lastCount === count2 && cached[idx]) {
     skipped2++;
     return cached[idx];
   }
@@ -1334,7 +1337,7 @@ async function predict(image25, config3, idx, count2) {
     const num = (await res.data())[0];
     cached[idx] = Math.round(100 * num) / 100;
     lastCount = count2;
-    last = now();
+    lastTime = now();
     tfjs_esm_exports.dispose([resize, res]);
     resolve(cached[idx]);
   });
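Most per-model changes in this bundle follow the pattern shown in the antispoof hunk above: the old frame-count-only guard is replaced by two booleans that must both hold before a cached result is returned. A standalone sketch of the pattern (illustrative names, not the library's internal API):

```js
const state = { lastTime: 0, skipped: Number.MAX_SAFE_INTEGER, cached: null };

async function cachedPredict(config, compute) {
  const withinTime = (config.skipTime || 0) > performance.now() - state.lastTime; // time budget not yet spent
  const withinFrames = state.skipped < (config.skipFrames || 0);                  // frame budget not yet spent
  if (config.skipAllowed && withinTime && withinFrames && state.cached) {
    state.skipped++;
    return state.cached;            // serve cached result
  }
  state.cached = await compute();   // recompute, then reset both budgets
  state.lastTime = performance.now();
  state.skipped = 0;
  return state.cached;
}
```

Initializing the frame counter to `Number.MAX_SAFE_INTEGER` guarantees the very first call always runs the model, which is also why several modules in this diff switch their counters from `0` to `Number.MAX_SAFE_INTEGER`.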
@@ -4910,7 +4913,7 @@ var skipped3 = Number.MAX_SAFE_INTEGER;
 var outputNodes;
 var cache = null;
 var padding = [[0, 0], [0, 0], [0, 0], [0, 0]];
-var last2 = 0;
+var lastTime2 = 0;
 async function loadDetect(config3) {
   var _a, _b, _c;
   if (env3.initial)
@@ -5027,11 +5030,13 @@ async function detectParts(input, config3, outputSize2) {
 }
 async function predict2(input, config3) {
   const outputSize2 = [input.shape[2] || 0, input.shape[1] || 0];
-  if (skipped3 < (config3.body.skipFrames || 0) && (config3.body.skipTime || 0) <= now() - last2 && config3.skipFrame && cache !== null) {
+  const skipTime = (config3.body.skipTime || 0) > now() - lastTime2;
+  const skipFrame = skipped3 < (config3.body.skipFrames || 0);
+  if (config3.skipAllowed && skipTime && skipFrame && cache !== null) {
     skipped3++;
   } else {
     cache = await detectParts(input, config3, outputSize2);
-    last2 = now();
+    lastTime2 = now();
     skipped3 = 0;
   }
   if (cache)
@@ -5126,8 +5131,8 @@ var labels = [
 // src/object/centernet.ts
 var model4;
 var inputSize3 = 0;
-var last3 = [];
-var lastTime = 0;
+var last = [];
+var lastTime3 = 0;
 var skipped4 = Number.MAX_SAFE_INTEGER;
 async function load4(config3) {
   if (env.initial)
@@ -5192,21 +5197,23 @@ async function process3(res, outputShape, config3) {
   return results;
 }
 async function predict3(input, config3) {
-  if (skipped4 < (config3.object.skipFrames || 0) && (config3.object.skipTime || 0) <= now() - lastTime && config3.skipFrame && last3.length > 0) {
+  const skipTime = (config3.object.skipTime || 0) > now() - lastTime3;
+  const skipFrame = skipped4 < (config3.object.skipFrames || 0);
+  if (config3.skipAllowed && skipTime && skipFrame && last.length > 0) {
     skipped4++;
-    return last3;
+    return last;
   }
   skipped4 = 0;
   if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
-    return last3;
+    return last;
   return new Promise(async (resolve) => {
     const outputSize2 = [input.shape[2], input.shape[1]];
     const resize = tfjs_esm_exports.image.resizeBilinear(input, [inputSize3, inputSize3]);
     const objectT = config3.object.enabled ? model4 == null ? void 0 : model4.execute(resize, ["tower_0/detections"]) : null;
-    lastTime = now();
+    lastTime3 = now();
     tfjs_esm_exports.dispose(resize);
     const obj = await process3(objectT, outputSize2, config3);
-    last3 = obj;
+    last = obj;
     resolve(obj);
   });
 }
@@ -5246,7 +5253,7 @@ var connected2 = {
 // src/body/efficientpose.ts
 var model5;
-var last4 = 0;
+var lastTime4 = 0;
 var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} };
 var skipped5 = Number.MAX_SAFE_INTEGER;
 async function load5(config3) {
@@ -5278,14 +5285,15 @@ function max2d(inputs, minScore) {
   });
 }
 async function predict4(image25, config3) {
-  var _a;
-  if (skipped5 < (((_a = config3.body) == null ? void 0 : _a.skipFrames) || 0) && config3.skipFrame && Object.keys(cache2.keypoints).length > 0 && (config3.body.skipTime || 0) <= now() - last4) {
+  const skipTime = (config3.body.skipTime || 0) > now() - lastTime4;
+  const skipFrame = skipped5 < (config3.body.skipFrames || 0);
+  if (config3.skipAllowed && skipTime && skipFrame && Object.keys(cache2.keypoints).length > 0) {
     skipped5++;
     return [cache2];
   }
   skipped5 = 0;
   return new Promise(async (resolve) => {
-    var _a2;
+    var _a;
     const tensor3 = tfjs_esm_exports.tidy(() => {
       if (!(model5 == null ? void 0 : model5.inputs[0].shape))
         return null;
@@ -5297,7 +5305,7 @@ async function predict4(image25, config3) {
     let resT;
     if (config3.body.enabled)
       resT = await (model5 == null ? void 0 : model5.predict(tensor3));
-    last4 = now();
+    lastTime4 = now();
     tfjs_esm_exports.dispose(tensor3);
     if (resT) {
       cache2.keypoints.length = 0;
@@ -5307,7 +5315,7 @@ async function predict4(image25, config3) {
       tfjs_esm_exports.dispose(squeeze8);
       for (let id = 0; id < stack3.length; id++) {
         const [x2, y2, partScore] = max2d(stack3[id], config3.body.minConfidence);
-        if (partScore > (((_a2 = config3.body) == null ? void 0 : _a2.minConfidence) || 0)) {
+        if (partScore > (((_a = config3.body) == null ? void 0 : _a.minConfidence) || 0)) {
           cache2.keypoints.push({
             score: Math.round(100 * partScore) / 100,
             part: kpt2[id],
@@ -5358,9 +5366,9 @@ async function predict4(image25, config3) {
 // src/gear/emotion.ts
 var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"];
 var model6;
-var last5 = [];
+var last2 = [];
 var lastCount2 = 0;
-var lastTime2 = 0;
+var lastTime5 = 0;
 var skipped6 = Number.MAX_SAFE_INTEGER;
 var rgb = [0.2989, 0.587, 0.114];
 async function load6(config3) {
@@ -5381,9 +5389,11 @@ async function predict5(image25, config3, idx, count2) {
   var _a, _b;
   if (!model6)
     return null;
-  if (skipped6 < (((_a = config3.face.emotion) == null ? void 0 : _a.skipFrames) || 0) && (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) <= now() - lastTime2 && config3.skipFrame && lastCount2 === count2 && last5[idx] && last5[idx].length > 0) {
+  const skipFrame = skipped6 < (((_a = config3.face.emotion) == null ? void 0 : _a.skipFrames) || 0);
+  const skipTime = (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime5;
+  if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last2[idx] && last2[idx].length > 0) {
     skipped6++;
-    return last5[idx];
+    return last2[idx];
   }
   skipped6 = 0;
   return new Promise(async (resolve) => {
@@ -5406,7 +5416,7 @@ async function predict5(image25, config3, idx, count2) {
     const normalize = tfjs_esm_exports.tidy(() => tfjs_esm_exports.mul(tfjs_esm_exports.sub(grayscale, 0.5), 2));
     tfjs_esm_exports.dispose(grayscale);
     const emotionT = await (model6 == null ? void 0 : model6.predict(normalize));
-    lastTime2 = now();
+    lastTime5 = now();
     const data = await emotionT.data();
     tfjs_esm_exports.dispose(emotionT);
     for (let i = 0; i < data.length; i++) {
@@ -5416,7 +5426,7 @@ async function predict5(image25, config3, idx, count2) {
       obj.sort((a, b) => b.score - a.score);
       tfjs_esm_exports.dispose(normalize);
     }
-    last5[idx] = obj;
+    last2[idx] = obj;
     lastCount2 = count2;
     resolve(obj);
   });
@@ -5559,13 +5569,15 @@ var boxCache = [];
 var model8 = null;
 var inputSize5 = 0;
 var skipped7 = Number.MAX_SAFE_INTEGER;
-var lastTime3 = 0;
+var lastTime6 = 0;
 var detectedFaces = 0;
 async function predict6(input, config3) {
-  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
-  if (!config3.skipFrame || (detectedFaces !== ((_a = config3.face.detector) == null ? void 0 : _a.maxDetected) || !((_b = config3.face.mesh) == null ? void 0 : _b.enabled)) && (skipped7 > (((_c = config3.face.detector) == null ? void 0 : _c.skipFrames) || 0) && (((_d = config3.face.description) == null ? void 0 : _d.skipTime) || 0) <= now() - lastTime3)) {
+  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
+  const skipTime = (((_a = config3.face.detector) == null ? void 0 : _a.skipTime) || 0) > now() - lastTime6;
+  const skipFrame = skipped7 < (((_b = config3.face.detector) == null ? void 0 : _b.skipFrames) || 0);
+  if (!config3.skipAllowed || !skipTime || !skipFrame || detectedFaces === 0) {
     const newBoxes2 = await getBoxes(input, config3);
-    lastTime3 = now();
+    lastTime6 = now();
     boxCache = [];
     for (const possible of newBoxes2.boxes) {
       const startPoint = await possible.box.startPoint.data();
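The face pipeline inverts the guard: instead of deciding when to return a cached result, it decides when the expensive box detector must run again. A simplified sketch of that decision (hypothetical helper and state names, not the library's internals):

```js
// re-run the face box detector when caching is not allowed for this frame,
// when either the time or the frame budget is exhausted, or when the last pass found no faces;
// otherwise keep refining the cached boxes with the mesh model only
function shouldRunFaceDetector(config, state) {
  const withinTime = (config.face.detector.skipTime || 0) > performance.now() - state.lastDetectTime;
  const withinFrames = state.framesSinceDetect < (config.face.detector.skipFrames || 0);
  return !config.skipAllowed || !withinTime || !withinFrames || state.detectedFaces === 0;
}
```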
@ -5601,16 +5613,16 @@ async function predict6(input, config3) {
faceScore: 0, faceScore: 0,
annotations: {} annotations: {}
}; };
if (((_e = config3.face.detector) == null ? void 0 : _e.rotation) && ((_f = config3.face.mesh) == null ? void 0 : _f.enabled) && env.kernels.includes("rotatewithoffset")) { if (((_c = config3.face.detector) == null ? void 0 : _c.rotation) && ((_d = config3.face.mesh) == null ? void 0 : _d.enabled) && env.kernels.includes("rotatewithoffset")) {
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} else { } else {
rotationMatrix = IDENTITY_MATRIX; rotationMatrix = IDENTITY_MATRIX;
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input, ((_g = config3.face.mesh) == null ? void 0 : _g.enabled) ? [inputSize5, inputSize5] : [size(), size()]); const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input, ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) ? [inputSize5, inputSize5] : [size(), size()]);
face5.tensor = tfjs_esm_exports.div(cut, 255); face5.tensor = tfjs_esm_exports.div(cut, 255);
tfjs_esm_exports.dispose(cut); tfjs_esm_exports.dispose(cut);
} }
face5.boxScore = Math.round(100 * box4.confidence) / 100; face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!((_h = config3.face.mesh) == null ? void 0 : _h.enabled)) { if (!((_f = config3.face.mesh) == null ? void 0 : _f.enabled)) {
face5.box = getClampedBox(box4, input); face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input); face5.boxRaw = getRawBox(box4, input);
face5.score = Math.round(100 * box4.confidence || 0) / 100; face5.score = Math.round(100 * box4.confidence || 0) / 100;
@ -5633,17 +5645,17 @@ async function predict6(input, config3) {
let rawCoords = await coordsReshaped.array(); let rawCoords = await coordsReshaped.array();
tfjs_esm_exports.dispose(contourCoords); tfjs_esm_exports.dispose(contourCoords);
tfjs_esm_exports.dispose(coordsReshaped); tfjs_esm_exports.dispose(coordsReshaped);
if (faceConfidence < (((_i = config3.face.detector) == null ? void 0 : _i.minConfidence) || 1)) { if (faceConfidence < (((_g = config3.face.detector) == null ? void 0 : _g.minConfidence) || 1)) {
box4.confidence = faceConfidence; box4.confidence = faceConfidence;
} else { } else {
if ((_j = config3.face.iris) == null ? void 0 : _j.enabled) if ((_h = config3.face.iris) == null ? void 0 : _h.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5); rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5); face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]); face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence }; box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence };
for (const key of Object.keys(meshAnnotations)) for (const key of Object.keys(meshAnnotations))
face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]); face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]);
if (((_k = config3.face.detector) == null ? void 0 : _k.rotation) && config3.face.mesh.enabled && ((_l = config3.face.description) == null ? void 0 : _l.enabled) && env.kernels.includes("rotatewithoffset")) { if (((_i = config3.face.detector) == null ? void 0 : _i.rotation) && config3.face.mesh.enabled && ((_j = config3.face.description) == null ? void 0 : _j.enabled) && env.kernels.includes("rotatewithoffset")) {
tfjs_esm_exports.dispose(face5.tensor); tfjs_esm_exports.dispose(face5.tensor);
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} }
@ -5657,7 +5669,7 @@ async function predict6(input, config3) {
faces.push(face5); faces.push(face5);
newBoxes.push(box4); newBoxes.push(box4);
} }
if ((_m = config3.face.mesh) == null ? void 0 : _m.enabled) if ((_k = config3.face.mesh) == null ? void 0 : _k.enabled)
boxCache = newBoxes.filter((a) => { boxCache = newBoxes.filter((a) => {
var _a2; var _a2;
return a.confidence > (((_a2 = config3.face.detector) == null ? void 0 : _a2.minConfidence) || 0); return a.confidence > (((_a2 = config3.face.detector) == null ? void 0 : _a2.minConfidence) || 0);
@@ -5687,8 +5699,8 @@ var uvmap = UV468;
 // src/face/faceres.ts
 var model9;
-var last6 = [];
-var lastTime4 = 0;
+var last3 = [];
+var lastTime7 = 0;
 var lastCount3 = 0;
 var skipped8 = Number.MAX_SAFE_INTEGER;
 async function load9(config3) {
@@ -5724,9 +5736,11 @@ async function predict7(image25, config3, idx, count2) {
   var _a, _b, _c, _d;
   if (!model9)
     return null;
-  if (skipped8 < (((_a = config3.face.description) == null ? void 0 : _a.skipFrames) || 0) && (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) <= now() - lastTime4 && config3.skipFrame && lastCount3 === count2 && ((_c = last6[idx]) == null ? void 0 : _c.age) && ((_d = last6[idx]) == null ? void 0 : _d.age) > 0) {
+  const skipFrame = skipped8 < (((_a = config3.face.description) == null ? void 0 : _a.skipFrames) || 0);
+  const skipTime = (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime7;
+  if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c = last3[idx]) == null ? void 0 : _c.age) && ((_d = last3[idx]) == null ? void 0 : _d.age) > 0) {
     skipped8++;
-    return last6[idx];
+    return last3[idx];
   }
   skipped8 = 0;
   return new Promise(async (resolve) => {
@@ -5740,7 +5754,7 @@ async function predict7(image25, config3, idx, count2) {
     if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) {
       const enhanced = enhance(image25);
       const resT = await (model9 == null ? void 0 : model9.predict(enhanced));
-      lastTime4 = now();
+      lastTime7 = now();
       tfjs_esm_exports.dispose(enhanced);
       const genderT = await resT.find((t) => t.shape[1] === 1);
       const gender = await genderT.data();
@@ -5760,7 +5774,7 @@ async function predict7(image25, config3, idx, count2) {
       obj.descriptor = Array.from(descriptor);
       resT.forEach((t) => tfjs_esm_exports.dispose(t));
     }
-    last6[idx] = obj;
+    last3[idx] = obj;
     lastCount3 = count2;
     resolve(obj);
   });
@ -8907,7 +8921,7 @@ var handBoxEnlargeFactor = 1.65;
var palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2]; var palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2];
var palmLandmarksPalmBase = 0; var palmLandmarksPalmBase = 0;
var palmLandmarksMiddleFingerBase = 2; var palmLandmarksMiddleFingerBase = 2;
var lastTime5 = 0; var lastTime8 = 0;
var HandPipeline = class { var HandPipeline = class {
constructor(handDetector, handPoseModel2) { constructor(handDetector, handPoseModel2) {
__publicField(this, "handDetector"); __publicField(this, "handDetector");
@ -8920,7 +8934,7 @@ var HandPipeline = class {
this.handPoseModel = handPoseModel2; this.handPoseModel = handPoseModel2;
this.inputSize = this.handPoseModel && this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0; this.inputSize = this.handPoseModel && this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0;
this.storedBoxes = []; this.storedBoxes = [];
this.skipped = 0; this.skipped = Number.MAX_SAFE_INTEGER;
this.detectedHands = 0; this.detectedHands = 0;
} }
calculateLandmarksBoundingBox(landmarks) { calculateLandmarksBoundingBox(landmarks) {
@@ -8972,11 +8986,13 @@ var HandPipeline = class {
   async estimateHands(image25, config3) {
     let useFreshBox = false;
     let boxes;
-    if (this.skipped === 0 || this.skipped > config3.hand.skipFrames && (config3.hand.skipTime || 0) <= now() - lastTime5 || !config3.hand.landmarks || !config3.skipFrame) {
+    const skipTime = (config3.hand.skipTime || 0) > now() - lastTime8;
+    const skipFrame = this.skipped < (config3.hand.skipFrames || 0);
+    if (config3.skipAllowed && skipTime && skipFrame) {
       boxes = await this.handDetector.estimateHandBounds(image25, config3);
       this.skipped = 0;
     }
-    if (config3.skipFrame)
+    if (config3.skipAllowed)
       this.skipped++;
     if (boxes && boxes.length > 0 && (boxes.length !== this.detectedHands && this.detectedHands !== config3.hand.maxDetected || !config3.hand.landmarks)) {
       this.detectedHands = 0;
@ -9001,7 +9017,7 @@ var HandPipeline = class {
tfjs_esm_exports.dispose(croppedInput); tfjs_esm_exports.dispose(croppedInput);
tfjs_esm_exports.dispose(rotatedImage); tfjs_esm_exports.dispose(rotatedImage);
const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage); const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage);
lastTime5 = now(); lastTime8 = now();
tfjs_esm_exports.dispose(handImage); tfjs_esm_exports.dispose(handImage);
const confidence = (await confidenceT.data())[0]; const confidence = (await confidenceT.data())[0];
tfjs_esm_exports.dispose(confidenceT); tfjs_esm_exports.dispose(confidenceT);
@@ -9569,8 +9585,8 @@ var faceIndex = 4;
 var boxExpandFact = 1.6;
 var maxDetectorResolution = 512;
 var detectorExpandFact = 1.4;
-var skipped9 = 0;
-var lastTime6 = 0;
+var skipped9 = Number.MAX_SAFE_INTEGER;
+var lastTime9 = 0;
 var outputSize = [0, 0];
 var cache3 = {
   boxes: [],
@@ -9710,17 +9726,21 @@ async function predict9(input, config3) {
     return [];
   outputSize = [input.shape[2] || 0, input.shape[1] || 0];
   skipped9++;
-  if (config3.skipFrame && skipped9 <= (config3.hand.skipFrames || 0) && (config3.hand.skipTime || 0) <= now() - lastTime6) {
+  const skipTime = (config3.hand.skipTime || 0) > now() - lastTime9;
+  const skipFrame = skipped9 < (config3.hand.skipFrames || 0);
+  if (config3.skipAllowed && skipTime && skipFrame) {
     return cache3.hands;
   }
   return new Promise(async (resolve) => {
-    if (config3.skipFrame && cache3.hands.length === config3.hand.maxDetected) {
+    const skipTimeExtended = 3 * (config3.hand.skipTime || 0) > now() - lastTime9;
+    const skipFrameExtended = skipped9 < 3 * (config3.hand.skipFrames || 0);
+    if (config3.skipAllowed && cache3.hands.length === config3.hand.maxDetected) {
       cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
-    } else if (config3.skipFrame && skipped9 < 3 * (config3.hand.skipFrames || 0) && (config3.hand.skipTime || 0) <= 3 * (now() - lastTime6) && cache3.hands.length > 0) {
+    } else if (config3.skipAllowed && skipTimeExtended && skipFrameExtended && cache3.hands.length > 0) {
       cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
     } else {
       cache3.boxes = await detectHands(input, config3);
-      lastTime6 = now();
+      lastTime9 = now();
       cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
       skipped9 = 0;
    }
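The handtrack path adds a second, wider window: cached hand boxes may be reused for up to three times the configured budget, during which only the lighter finger/landmark pass runs on those boxes. A sketch of that tiering (illustrative names; `refineFingers`/`detectHands` stand in for the bundle's internal functions):

```js
async function trackHands(input, config, state) {
  const withinExtTime = 3 * (config.hand.skipTime || 0) > performance.now() - state.lastDetectTime;
  const withinExtFrames = state.framesSinceDetect < 3 * (config.hand.skipFrames || 0);
  if (config.skipAllowed && state.hands.length === config.hand.maxDetected) {
    // every expected hand is already tracked: just refine landmarks on the cached boxes
    state.hands = await Promise.all(state.boxes.map((box) => refineFingers(input, box, config)));
  } else if (config.skipAllowed && withinExtTime && withinExtFrames && state.hands.length > 0) {
    // some hands tracked and still inside the extended window: keep using cached boxes
    state.hands = await Promise.all(state.boxes.map((box) => refineFingers(input, box, config)));
  } else {
    // window exhausted (or nothing tracked): run the full hand detector and reset counters
    state.boxes = await detectHands(input, config);
    state.lastDetectTime = performance.now();
    state.framesSinceDetect = 0;
    state.hands = await Promise.all(state.boxes.map((box) => refineFingers(input, box, config)));
  }
  return state.hands;
}
```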
@@ -10015,10 +10035,12 @@ async function parseMultiPose(res, config3, image25, inputBox) {
 async function predict10(input, config3) {
   if (!model10 || !(model10 == null ? void 0 : model10.inputs[0].shape))
     return [];
-  if (!config3.skipFrame)
+  if (!config3.skipAllowed)
     cache5.boxes.length = 0;
   skipped10++;
-  if (config3.skipFrame && (skipped10 <= (config3.body.skipFrames || 0) && (config3.body.skipTime || 0) <= now() - cache5.last)) {
+  const skipTime = (config3.body.skipTime || 0) > now() - cache5.last;
+  const skipFrame = skipped10 < (config3.body.skipFrames || 0);
+  if (config3.skipAllowed && skipTime && skipFrame) {
     return cache5.bodies;
   }
   return new Promise(async (resolve) => {
@@ -10040,8 +10062,8 @@ async function predict10(input, config3) {
 // src/object/nanodet.ts
 var model11;
-var last7 = [];
-var lastTime7 = 0;
+var last4 = [];
+var lastTime10 = 0;
 var skipped11 = Number.MAX_SAFE_INTEGER;
 var scaleBox = 2.5;
 async function load12(config3) {
@@ -10121,13 +10143,15 @@ async function process4(res, inputSize8, outputShape, config3) {
   return results;
 }
 async function predict11(image25, config3) {
-  if (skipped11 < (config3.object.skipFrames || 0) && (config3.object.skipTime || 0) <= now() - lastTime7 && config3.skipFrame && last7.length > 0) {
+  const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
+  const skipFrame = skipped11 < (config3.object.skipFrames || 0);
+  if (config3.skipAllowed && skipTime && skipFrame && last4.length > 0) {
    skipped11++;
-    return last7;
+    return last4;
  }
  skipped11 = 0;
  if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
-    return last7;
+    return last4;
  return new Promise(async (resolve) => {
    const outputSize2 = [image25.shape[2], image25.shape[1]];
    const resize = tfjs_esm_exports.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false);
@@ -10138,10 +10162,10 @@ async function predict11(image25, config3) {
    let objectT;
    if (config3.object.enabled)
      objectT = await model11.predict(transpose);
-    lastTime7 = now();
+    lastTime10 = now();
    tfjs_esm_exports.dispose(transpose);
    const obj = await process4(objectT, model11.inputSize, outputSize2, config3);
-    last7 = obj;
+    last4 = obj;
    resolve(obj);
  });
 }
@@ -11336,7 +11360,7 @@ async function canvas2(input, output) {
 async function all(inCanvas2, result, drawOptions) {
   if (!result || !result.performance || !result || !inCanvas2)
     return null;
-  const timestamp = now();
+  const timeStamp = now();
   const localOptions = mergeDeep(options2, drawOptions);
   const promise = Promise.all([
     face(inCanvas2, result.face, localOptions),
@@ -11345,7 +11369,7 @@ async function all(inCanvas2, result, drawOptions) {
     object(inCanvas2, result.object, localOptions),
     gesture(inCanvas2, result.gesture, localOptions)
   ]);
-  result.performance.draw = Math.trunc(now() - timestamp);
+  result.performance.draw = env.perfadd ? (result.performance.draw || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
   return promise;
 }
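The new `env.perfadd` flag changes how the `performance` counters are reported: with it disabled each entry holds the duration of the last frame, with it enabled durations accumulate across frames until the caller resets them. A small sketch of reading the counters either way (property names assumed to match the hunks above; `video` stands for any supported input):

```js
human.env.perfadd = true; // accumulate per-module timings instead of overwriting them each frame

async function measure(video, frames = 100) {
  for (let i = 0; i < frames; i++) await human.detect(video);
  // with perfadd enabled this is a total over all measured frames, so divide for an average
  console.log('avg face ms:', (human.performance.face || 0) / frames);
  human.performance.face = 0; // reset the accumulator before the next measurement window
}
```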
@ -11470,7 +11494,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:face"; parent.state = "run:face";
timeStamp = now(); timeStamp = now();
const faces = await predict6(input, parent.config); const faces = await predict6(input, parent.config);
parent.performance.face = Math.trunc(now() - timeStamp); parent.performance.face = env.perfadd ? (parent.performance.face || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (!input.shape || input.shape.length !== 4) if (!input.shape || input.shape.length !== 4)
return []; return [];
if (!faces) if (!faces)
@ -11489,7 +11513,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:emotion"; parent.state = "run:emotion";
timeStamp = now(); timeStamp = now();
emotionRes = parent.config.face.emotion.enabled ? await predict5(faces[i].tensor || tfjs_esm_exports.tensor([]), parent.config, i, faces.length) : null; emotionRes = parent.config.face.emotion.enabled ? await predict5(faces[i].tensor || tfjs_esm_exports.tensor([]), parent.config, i, faces.length) : null;
parent.performance.emotion = Math.trunc(now() - timeStamp); parent.performance.emotion = env.perfadd ? (parent.performance.emotion || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End Emotion:"); parent.analyze("End Emotion:");
parent.analyze("Start AntiSpoof:"); parent.analyze("Start AntiSpoof:");
@ -11499,7 +11523,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:antispoof"; parent.state = "run:antispoof";
timeStamp = now(); timeStamp = now();
antispoofRes = parent.config.face.antispoof.enabled ? await predict(faces[i].tensor || tfjs_esm_exports.tensor([]), parent.config, i, faces.length) : null; antispoofRes = parent.config.face.antispoof.enabled ? await predict(faces[i].tensor || tfjs_esm_exports.tensor([]), parent.config, i, faces.length) : null;
parent.performance.antispoof = Math.trunc(now() - timeStamp); parent.performance.antispoof = env.perfadd ? (parent.performance.antispoof || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End AntiSpoof:"); parent.analyze("End AntiSpoof:");
parent.analyze("Start Description:"); parent.analyze("Start Description:");
@ -11509,7 +11533,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:description"; parent.state = "run:description";
timeStamp = now(); timeStamp = now();
descRes = parent.config.face.description.enabled ? await predict7(faces[i].tensor || tfjs_esm_exports.tensor([]), parent.config, i, faces.length) : null; descRes = parent.config.face.description.enabled ? await predict7(faces[i].tensor || tfjs_esm_exports.tensor([]), parent.config, i, faces.length) : null;
parent.performance.embedding = Math.trunc(now() - timeStamp); parent.performance.embedding = env.perfadd ? (parent.performance.embedding || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End Description:"); parent.analyze("End Description:");
if (parent.config.async) { if (parent.config.async) {
@@ -12827,7 +12851,7 @@ var Human = class {
     const count2 = Object.values(this.models).filter((model14) => model14).length;
     if (userConfig)
       this.config = mergeDeep(this.config, userConfig);
-    if (env.initial) {
+    if (this.env.initial) {
       if (this.config.debug)
         log(`version: ${this.version}`);
       if (this.config.debug)
@@ -12843,9 +12867,9 @@ var Human = class {
       }
     }
     await load15(this);
-    if (env.initial && this.config.debug)
+    if (this.env.initial && this.config.debug)
       log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors");
-    env.initial = false;
+    this.env.initial = false;
     const loaded = Object.values(this.models).filter((model14) => model14).length;
     if (loaded !== count2) {
       await validate2(this);
@@ -12853,7 +12877,7 @@ var Human = class {
   }
   const current = Math.trunc(now() - timeStamp);
   if (current > (this.performance.load || 0))
-    this.performance.load = current;
+    this.performance.load = this.env.perfadd ? (this.performance.load || 0) + current : current;
 }
 next(result = this.result) {
   return calc2(result, this.config);
@@ -12867,7 +12891,6 @@ var Human = class {
   var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
   this.state = "config";
   let timeStamp;
-  let elapsedTime;
   this.config = mergeDeep(this.config, userConfig);
   this.state = "check";
   const error = __privateGet(this, _sanity).call(this, input);
@@ -12882,7 +12905,7 @@ var Human = class {
   this.state = "image";
   const img = process2(input, this.config);
   this.process = img;
-  this.performance.image = Math.trunc(now() - timeStamp);
+  this.performance.image = this.env.perfadd ? (this.performance.image || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
   this.analyze("Get Image:");
   if (!img.tensor) {
     if (this.config.debug)
@@ -12892,15 +12915,15 @@ var Human = class {
   }
   this.emit("image");
   timeStamp = now();
-  this.config.skipFrame = await skip(this.config, img.tensor);
+  this.config.skipAllowed = await skip(this.config, img.tensor);
   if (!this.performance.frames)
     this.performance.frames = 0;
   if (!this.performance.cached)
     this.performance.cached = 0;
   this.performance.frames++;
-  if (this.config.skipFrame)
+  if (this.config.skipAllowed)
     this.performance.cached++;
-  this.performance.changed = Math.trunc(now() - timeStamp);
+  this.performance.changed = this.env.perfadd ? (this.performance.changed || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
   this.analyze("Check Changed:");
   let faceRes = [];
   let bodyRes = [];
@@ -12914,9 +12937,7 @@ var Human = class {
   } else {
     timeStamp = now();
     faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : [];
-    elapsedTime = Math.trunc(now() - timeStamp);
-    if (elapsedTime > 0)
-      this.performance.face = elapsedTime;
+    this.performance.face = this.env.perfadd ? (this.performance.face || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
   }
   if (this.config.async && (this.config.body.maxDetected === -1 || this.config.hand.maxDetected === -1))
     faceRes = await faceRes;
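Because `detect()` now tracks both the total number of processed frames and the frames where caching was allowed, a rough cache hit rate can be derived directly from the performance object (a sketch; counter names as introduced in the hunk above, `video` stands for any supported input):

```js
const result = await human.detect(video);
const frames = result.performance.frames || 0; // total frames processed
const cached = result.performance.cached || 0; // frames where skipAllowed was true
console.log(`cache hit rate: ${(100 * cached / Math.max(frames, 1)).toFixed(1)}%`);
```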
@@ -12944,9 +12965,7 @@ var Human = class {
       bodyRes = this.config.body.enabled ? await predict4(img.tensor, bodyConfig) : [];
     else if ((_h = this.config.body.modelPath) == null ? void 0 : _h.includes("movenet"))
       bodyRes = this.config.body.enabled ? await predict10(img.tensor, bodyConfig) : [];
-    elapsedTime = Math.trunc(now() - timeStamp);
-    if (elapsedTime > 0)
-      this.performance.body = elapsedTime;
+    this.performance.body = this.env.perfadd ? (this.performance.body || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
   }
   this.analyze("End Body:");
   this.analyze("Start Hand:");
@@ -12965,9 +12984,7 @@ var Human = class {
       handRes = this.config.hand.enabled ? await predict8(img.tensor, handConfig) : [];
     else if ((_p = (_o = this.config.hand.detector) == null ? void 0 : _o.modelPath) == null ? void 0 : _p.includes("handtrack"))
       handRes = this.config.hand.enabled ? await predict9(img.tensor, handConfig) : [];
-    elapsedTime = Math.trunc(now() - timeStamp);
-    if (elapsedTime > 0)
-      this.performance.hand = elapsedTime;
+    this.performance.hand = this.env.perfadd ? (this.performance.hand || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
   }
   this.analyze("End Hand:");
   this.analyze("Start Object:");
@@ -12985,9 +13002,7 @@ var Human = class {
       objectRes = this.config.object.enabled ? await predict11(img.tensor, this.config) : [];
     else if ((_t = this.config.object.modelPath) == null ? void 0 : _t.includes("centernet"))
       objectRes = this.config.object.enabled ? await predict3(img.tensor, this.config) : [];
-    elapsedTime = Math.trunc(now() - timeStamp);
-    if (elapsedTime > 0)
-      this.performance.object = elapsedTime;
+    this.performance.object = this.env.perfadd ? (this.performance.object || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
   }
   this.analyze("End Object:");
   this.state = "detect:await";
@@ -12999,7 +13014,7 @@ var Human = class {
   timeStamp = now();
   gestureRes = [...face2(faceRes), ...body2(bodyRes), ...hand2(handRes), ...iris3(faceRes)];
   if (!this.config.async)
-    this.performance.gesture = Math.trunc(now() - timeStamp);
+    this.performance.gesture = this.env.perfadd ? (this.performance.gesture || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
   else if (this.performance.gesture)
     delete this.performance.gesture;
 }

File diff suppressed because one or more lines are too long

dist/human.esm.js (vendored, 223 changes)

@ -104,8 +104,8 @@ var config = {
debug: true, debug: true,
async: true, async: true,
warmup: "full", warmup: "full",
cacheSensitivity: 0.75, cacheSensitivity: 0.7,
skipFrame: false, skipAllowed: false,
filter: { filter: {
enabled: true, enabled: true,
width: 0, width: 0,
@ -135,8 +135,8 @@ var config = {
modelPath: "blazeface.json", modelPath: "blazeface.json",
rotation: true, rotation: true,
maxDetected: 1, maxDetected: 1,
skipFrames: 11, skipFrames: 99,
skipTime: 2e3, skipTime: 2500,
minConfidence: 0.2, minConfidence: 0.2,
iouThreshold: 0.1, iouThreshold: 0.1,
return: false return: false
@ -152,21 +152,21 @@ var config = {
emotion: { emotion: {
enabled: true, enabled: true,
minConfidence: 0.1, minConfidence: 0.1,
skipFrames: 12, skipFrames: 99,
skipTime: 2e3, skipTime: 1500,
modelPath: "emotion.json" modelPath: "emotion.json"
}, },
description: { description: {
enabled: true, enabled: true,
modelPath: "faceres.json", modelPath: "faceres.json",
skipFrames: 13, skipFrames: 99,
skipTime: 2e3, skipTime: 3e3,
minConfidence: 0.1 minConfidence: 0.1
}, },
antispoof: { antispoof: {
enabled: false, enabled: false,
skipFrames: 14, skipFrames: 99,
skipTime: 2e3, skipTime: 4e3,
modelPath: "antispoof.json" modelPath: "antispoof.json"
} }
}, },
@ -179,12 +179,12 @@ var config = {
maxDetected: -1, maxDetected: -1,
minConfidence: 0.3, minConfidence: 0.3,
skipFrames: 1, skipFrames: 1,
skipTime: 2e3 skipTime: 200
}, },
hand: { hand: {
enabled: true, enabled: true,
rotation: true, rotation: true,
skipFrames: 2, skipFrames: 99,
skipTime: 2e3, skipTime: 2e3,
minConfidence: 0.5, minConfidence: 0.5,
iouThreshold: 0.2, iouThreshold: 0.2,
@ -203,8 +203,8 @@ var config = {
minConfidence: 0.2, minConfidence: 0.2,
iouThreshold: 0.4, iouThreshold: 0.4,
maxDetected: 10, maxDetected: 10,
skipFrames: 15, skipFrames: 99,
skipTime: 2e3 skipTime: 1e3
}, },
segmentation: { segmentation: {
enabled: false, enabled: false,
@ -34798,6 +34798,7 @@ var Env = class {
__publicField(this, "filter"); __publicField(this, "filter");
__publicField(this, "tfjs"); __publicField(this, "tfjs");
__publicField(this, "offscreen"); __publicField(this, "offscreen");
__publicField(this, "perfadd", false);
__publicField(this, "wasm", { __publicField(this, "wasm", {
supported: void 0, supported: void 0,
backend: void 0, backend: void 0,
@ -34921,7 +34922,7 @@ var model2;
var cached = []; var cached = [];
var skipped2 = Number.MAX_SAFE_INTEGER; var skipped2 = Number.MAX_SAFE_INTEGER;
var lastCount = 0; var lastCount = 0;
var last = 0; var lastTime = 0;
async function load2(config3) { async function load2(config3) {
var _a2, _b; var _a2, _b;
if (env.initial) if (env.initial)
@ -34940,7 +34941,9 @@ async function predict(image6, config3, idx, count2) {
var _a2, _b; var _a2, _b;
if (!model2) if (!model2)
return null; return null;
if (skipped2 < (((_a2 = config3.face.antispoof) == null ? void 0 : _a2.skipFrames) || 0) && (((_b = config3.face.antispoof) == null ? void 0 : _b.skipTime) || 0) <= now() - last && config3.skipFrame && lastCount === count2 && cached[idx]) { const skipTime = (((_a2 = config3.face.antispoof) == null ? void 0 : _a2.skipTime) || 0) > now() - lastTime;
const skipFrame = skipped2 < (((_b = config3.face.antispoof) == null ? void 0 : _b.skipFrames) || 0);
if (config3.skipAllowed && skipTime && skipFrame && lastCount === count2 && cached[idx]) {
skipped2++; skipped2++;
return cached[idx]; return cached[idx];
} }
@ -34951,7 +34954,7 @@ async function predict(image6, config3, idx, count2) {
const num = (await res.data())[0]; const num = (await res.data())[0];
cached[idx] = Math.round(100 * num) / 100; cached[idx] = Math.round(100 * num) / 100;
lastCount = count2; lastCount = count2;
last = now(); lastTime = now();
De([resize, res]); De([resize, res]);
resolve(cached[idx]); resolve(cached[idx]);
}); });
@ -38527,7 +38530,7 @@ var skipped3 = Number.MAX_SAFE_INTEGER;
var outputNodes; var outputNodes;
var cache = null; var cache = null;
var padding = [[0, 0], [0, 0], [0, 0], [0, 0]]; var padding = [[0, 0], [0, 0], [0, 0], [0, 0]];
var last2 = 0; var lastTime2 = 0;
async function loadDetect(config3) { async function loadDetect(config3) {
var _a2, _b, _c2; var _a2, _b, _c2;
if (env2.initial) if (env2.initial)
@ -38644,11 +38647,13 @@ async function detectParts(input, config3, outputSize2) {
} }
async function predict2(input, config3) { async function predict2(input, config3) {
const outputSize2 = [input.shape[2] || 0, input.shape[1] || 0]; const outputSize2 = [input.shape[2] || 0, input.shape[1] || 0];
if (skipped3 < (config3.body.skipFrames || 0) && (config3.body.skipTime || 0) <= now() - last2 && config3.skipFrame && cache !== null) { const skipTime = (config3.body.skipTime || 0) > now() - lastTime2;
const skipFrame = skipped3 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && cache !== null) {
skipped3++; skipped3++;
} else { } else {
cache = await detectParts(input, config3, outputSize2); cache = await detectParts(input, config3, outputSize2);
last2 = now(); lastTime2 = now();
skipped3 = 0; skipped3 = 0;
} }
if (cache) if (cache)
@ -38743,8 +38748,8 @@ var labels = [
// src/object/centernet.ts // src/object/centernet.ts
var model4; var model4;
var inputSize3 = 0; var inputSize3 = 0;
var last3 = []; var last = [];
var lastTime = 0; var lastTime3 = 0;
var skipped4 = Number.MAX_SAFE_INTEGER; var skipped4 = Number.MAX_SAFE_INTEGER;
async function load4(config3) { async function load4(config3) {
if (env.initial) if (env.initial)
@ -38809,21 +38814,23 @@ async function process3(res, outputShape, config3) {
return results; return results;
} }
async function predict3(input, config3) { async function predict3(input, config3) {
if (skipped4 < (config3.object.skipFrames || 0) && (config3.object.skipTime || 0) <= now() - lastTime && config3.skipFrame && last3.length > 0) { const skipTime = (config3.object.skipTime || 0) > now() - lastTime3;
const skipFrame = skipped4 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last.length > 0) {
skipped4++; skipped4++;
return last3; return last;
} }
skipped4 = 0; skipped4 = 0;
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last3; return last;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [input.shape[2], input.shape[1]]; const outputSize2 = [input.shape[2], input.shape[1]];
const resize = Cn.resizeBilinear(input, [inputSize3, inputSize3]); const resize = Cn.resizeBilinear(input, [inputSize3, inputSize3]);
const objectT = config3.object.enabled ? model4 == null ? void 0 : model4.execute(resize, ["tower_0/detections"]) : null; const objectT = config3.object.enabled ? model4 == null ? void 0 : model4.execute(resize, ["tower_0/detections"]) : null;
lastTime = now(); lastTime3 = now();
De(resize); De(resize);
const obj = await process3(objectT, outputSize2, config3); const obj = await process3(objectT, outputSize2, config3);
last3 = obj; last = obj;
resolve(obj); resolve(obj);
}); });
} }
@ -38863,7 +38870,7 @@ var connected2 = {
// src/body/efficientpose.ts // src/body/efficientpose.ts
var model5; var model5;
var last4 = 0; var lastTime4 = 0;
var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} }; var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} };
var skipped5 = Number.MAX_SAFE_INTEGER; var skipped5 = Number.MAX_SAFE_INTEGER;
async function load5(config3) { async function load5(config3) {
@ -38895,14 +38902,15 @@ function max2d(inputs, minScore) {
}); });
} }
async function predict4(image6, config3) { async function predict4(image6, config3) {
var _a2; const skipTime = (config3.body.skipTime || 0) > now() - lastTime4;
if (skipped5 < (((_a2 = config3.body) == null ? void 0 : _a2.skipFrames) || 0) && config3.skipFrame && Object.keys(cache2.keypoints).length > 0 && (config3.body.skipTime || 0) <= now() - last4) { const skipFrame = skipped5 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && Object.keys(cache2.keypoints).length > 0) {
skipped5++; skipped5++;
return [cache2]; return [cache2];
} }
skipped5 = 0; skipped5 = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
var _a3; var _a2;
const tensor = V(() => { const tensor = V(() => {
if (!(model5 == null ? void 0 : model5.inputs[0].shape)) if (!(model5 == null ? void 0 : model5.inputs[0].shape))
return null; return null;
@ -38914,7 +38922,7 @@ async function predict4(image6, config3) {
let resT; let resT;
if (config3.body.enabled) if (config3.body.enabled)
resT = await (model5 == null ? void 0 : model5.predict(tensor)); resT = await (model5 == null ? void 0 : model5.predict(tensor));
last4 = now(); lastTime4 = now();
De(tensor); De(tensor);
if (resT) { if (resT) {
cache2.keypoints.length = 0; cache2.keypoints.length = 0;
@ -38924,7 +38932,7 @@ async function predict4(image6, config3) {
De(squeeze); De(squeeze);
for (let id2 = 0; id2 < stack.length; id2++) { for (let id2 = 0; id2 < stack.length; id2++) {
const [x7, y7, partScore] = max2d(stack[id2], config3.body.minConfidence); const [x7, y7, partScore] = max2d(stack[id2], config3.body.minConfidence);
if (partScore > (((_a3 = config3.body) == null ? void 0 : _a3.minConfidence) || 0)) { if (partScore > (((_a2 = config3.body) == null ? void 0 : _a2.minConfidence) || 0)) {
cache2.keypoints.push({ cache2.keypoints.push({
score: Math.round(100 * partScore) / 100, score: Math.round(100 * partScore) / 100,
part: kpt2[id2], part: kpt2[id2],
@ -38975,9 +38983,9 @@ async function predict4(image6, config3) {
// src/gear/emotion.ts // src/gear/emotion.ts
var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"]; var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"];
var model6; var model6;
var last5 = []; var last2 = [];
var lastCount2 = 0; var lastCount2 = 0;
var lastTime2 = 0; var lastTime5 = 0;
var skipped6 = Number.MAX_SAFE_INTEGER; var skipped6 = Number.MAX_SAFE_INTEGER;
var rgb = [0.2989, 0.587, 0.114]; var rgb = [0.2989, 0.587, 0.114];
async function load6(config3) { async function load6(config3) {
@ -38998,9 +39006,11 @@ async function predict5(image6, config3, idx, count2) {
var _a2, _b; var _a2, _b;
if (!model6) if (!model6)
return null; return null;
if (skipped6 < (((_a2 = config3.face.emotion) == null ? void 0 : _a2.skipFrames) || 0) && (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) <= now() - lastTime2 && config3.skipFrame && lastCount2 === count2 && last5[idx] && last5[idx].length > 0) { const skipFrame = skipped6 < (((_a2 = config3.face.emotion) == null ? void 0 : _a2.skipFrames) || 0);
const skipTime = (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime5;
if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last2[idx] && last2[idx].length > 0) {
skipped6++; skipped6++;
return last5[idx]; return last2[idx];
} }
skipped6 = 0; skipped6 = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
@ -39023,7 +39033,7 @@ async function predict5(image6, config3, idx, count2) {
const normalize = V(() => O(le(grayscale, 0.5), 2)); const normalize = V(() => O(le(grayscale, 0.5), 2));
De(grayscale); De(grayscale);
const emotionT = await (model6 == null ? void 0 : model6.predict(normalize)); const emotionT = await (model6 == null ? void 0 : model6.predict(normalize));
lastTime2 = now(); lastTime5 = now();
const data = await emotionT.data(); const data = await emotionT.data();
De(emotionT); De(emotionT);
for (let i = 0; i < data.length; i++) { for (let i = 0; i < data.length; i++) {
@ -39033,7 +39043,7 @@ async function predict5(image6, config3, idx, count2) {
obj.sort((a, b10) => b10.score - a.score); obj.sort((a, b10) => b10.score - a.score);
De(normalize); De(normalize);
} }
last5[idx] = obj; last2[idx] = obj;
lastCount2 = count2; lastCount2 = count2;
resolve(obj); resolve(obj);
}); });
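The emotion hunks above (and the description hunks further down) apply the same gate to a per-face cache: results are stored per face index and reused only while the number of detected faces is unchanged, so a change in face count forces a fresh run. A sketch of that variant, with illustrative names:

```ts
type FaceScore = { score: number; emotion: string };
type FaceCacheConfig = { skipAllowed: boolean; skipTime?: number; skipFrames?: number };

let last: FaceScore[][] = [];      // cached result per face index
let lastCount = 0;                 // face count when the cache was filled
let lastTime = 0;
let skipped = Number.MAX_SAFE_INTEGER;

async function predictPerFace(
  run: () => Promise<FaceScore[]>, // model run for one face crop
  config: FaceCacheConfig,
  idx: number,                     // index of this face in the current frame
  count: number,                   // total faces in the current frame
): Promise<FaceScore[]> {
  const skipFrame = skipped < (config.skipFrames || 0);
  const skipTime = (config.skipTime || 0) > Date.now() - lastTime;
  if (config.skipAllowed && skipTime && skipFrame && lastCount === count && last[idx] && last[idx].length > 0) {
    skipped++;
    return last[idx];              // reuse this face's cached result
  }
  skipped = 0;
  last[idx] = await run();
  lastTime = Date.now();
  lastCount = count;
  return last[idx];
}
```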
@ -39176,13 +39186,15 @@ var boxCache = [];
var model8 = null; var model8 = null;
var inputSize5 = 0; var inputSize5 = 0;
var skipped7 = Number.MAX_SAFE_INTEGER; var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime3 = 0; var lastTime6 = 0;
var detectedFaces = 0; var detectedFaces = 0;
async function predict6(input, config3) { async function predict6(input, config3) {
var _a2, _b, _c2, _d2, _e, _f2, _g, _h2, _i2, _j2, _k2, _l2, _m2; var _a2, _b, _c2, _d2, _e, _f2, _g, _h2, _i2, _j2, _k2;
if (!config3.skipFrame || (detectedFaces !== ((_a2 = config3.face.detector) == null ? void 0 : _a2.maxDetected) || !((_b = config3.face.mesh) == null ? void 0 : _b.enabled)) && (skipped7 > (((_c2 = config3.face.detector) == null ? void 0 : _c2.skipFrames) || 0) && (((_d2 = config3.face.description) == null ? void 0 : _d2.skipTime) || 0) <= now() - lastTime3)) { const skipTime = (((_a2 = config3.face.detector) == null ? void 0 : _a2.skipTime) || 0) > now() - lastTime6;
const skipFrame = skipped7 < (((_b = config3.face.detector) == null ? void 0 : _b.skipFrames) || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || detectedFaces === 0) {
const newBoxes2 = await getBoxes(input, config3); const newBoxes2 = await getBoxes(input, config3);
lastTime3 = now(); lastTime6 = now();
boxCache = []; boxCache = [];
for (const possible of newBoxes2.boxes) { for (const possible of newBoxes2.boxes) {
const startPoint = await possible.box.startPoint.data(); const startPoint = await possible.box.startPoint.data();
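For the face box detector the gate is written in the inverted direction: detection runs whenever caching is not allowed, either window has expired, or no faces are currently tracked; otherwise the boxes already in `boxCache` are reused and only the mesh work runs. A compact sketch of that condition (function and parameter names are illustrative):

```ts
// Decide whether the face box detector has to run on this frame.
function mustRunDetector(
  cfg: { skipAllowed: boolean; skipTime?: number; skipFrames?: number },
  skipped: number,        // consecutive frames served from cache
  lastTime: number,       // timestamp of the last detector run
  trackedFaces: number,   // faces currently held in the box cache
): boolean {
  const skipTime = (cfg.skipTime || 0) > performance.now() - lastTime;
  const skipFrame = skipped < (cfg.skipFrames || 0);
  return !cfg.skipAllowed || !skipTime || !skipFrame || trackedFaces === 0;
}
```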
@ -39218,16 +39230,16 @@ async function predict6(input, config3) {
faceScore: 0, faceScore: 0,
annotations: {} annotations: {}
}; };
if (((_e = config3.face.detector) == null ? void 0 : _e.rotation) && ((_f2 = config3.face.mesh) == null ? void 0 : _f2.enabled) && env.kernels.includes("rotatewithoffset")) { if (((_c2 = config3.face.detector) == null ? void 0 : _c2.rotation) && ((_d2 = config3.face.mesh) == null ? void 0 : _d2.enabled) && env.kernels.includes("rotatewithoffset")) {
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} else { } else {
rotationMatrix = IDENTITY_MATRIX; rotationMatrix = IDENTITY_MATRIX;
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input, ((_g = config3.face.mesh) == null ? void 0 : _g.enabled) ? [inputSize5, inputSize5] : [size(), size()]); const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input, ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) ? [inputSize5, inputSize5] : [size(), size()]);
face5.tensor = ce(cut, 255); face5.tensor = ce(cut, 255);
De(cut); De(cut);
} }
face5.boxScore = Math.round(100 * box4.confidence) / 100; face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!((_h2 = config3.face.mesh) == null ? void 0 : _h2.enabled)) { if (!((_f2 = config3.face.mesh) == null ? void 0 : _f2.enabled)) {
face5.box = getClampedBox(box4, input); face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input); face5.boxRaw = getRawBox(box4, input);
face5.score = Math.round(100 * box4.confidence || 0) / 100; face5.score = Math.round(100 * box4.confidence || 0) / 100;
@ -39250,17 +39262,17 @@ async function predict6(input, config3) {
let rawCoords = await coordsReshaped.array(); let rawCoords = await coordsReshaped.array();
De(contourCoords); De(contourCoords);
De(coordsReshaped); De(coordsReshaped);
if (faceConfidence < (((_i2 = config3.face.detector) == null ? void 0 : _i2.minConfidence) || 1)) { if (faceConfidence < (((_g = config3.face.detector) == null ? void 0 : _g.minConfidence) || 1)) {
box4.confidence = faceConfidence; box4.confidence = faceConfidence;
} else { } else {
if ((_j2 = config3.face.iris) == null ? void 0 : _j2.enabled) if ((_h2 = config3.face.iris) == null ? void 0 : _h2.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5); rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5); face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]); face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence }; box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence };
for (const key of Object.keys(meshAnnotations)) for (const key of Object.keys(meshAnnotations))
face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]); face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]);
if (((_k2 = config3.face.detector) == null ? void 0 : _k2.rotation) && config3.face.mesh.enabled && ((_l2 = config3.face.description) == null ? void 0 : _l2.enabled) && env.kernels.includes("rotatewithoffset")) { if (((_i2 = config3.face.detector) == null ? void 0 : _i2.rotation) && config3.face.mesh.enabled && ((_j2 = config3.face.description) == null ? void 0 : _j2.enabled) && env.kernels.includes("rotatewithoffset")) {
De(face5.tensor); De(face5.tensor);
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} }
@ -39274,7 +39286,7 @@ async function predict6(input, config3) {
faces.push(face5); faces.push(face5);
newBoxes.push(box4); newBoxes.push(box4);
} }
if ((_m2 = config3.face.mesh) == null ? void 0 : _m2.enabled) if ((_k2 = config3.face.mesh) == null ? void 0 : _k2.enabled)
boxCache = newBoxes.filter((a) => { boxCache = newBoxes.filter((a) => {
var _a3; var _a3;
return a.confidence > (((_a3 = config3.face.detector) == null ? void 0 : _a3.minConfidence) || 0); return a.confidence > (((_a3 = config3.face.detector) == null ? void 0 : _a3.minConfidence) || 0);
@ -39304,8 +39316,8 @@ var uvmap = UV468;
// src/face/faceres.ts // src/face/faceres.ts
var model9; var model9;
var last6 = []; var last3 = [];
var lastTime4 = 0; var lastTime7 = 0;
var lastCount3 = 0; var lastCount3 = 0;
var skipped8 = Number.MAX_SAFE_INTEGER; var skipped8 = Number.MAX_SAFE_INTEGER;
async function load9(config3) { async function load9(config3) {
@ -39341,9 +39353,11 @@ async function predict7(image6, config3, idx, count2) {
var _a2, _b, _c2, _d2; var _a2, _b, _c2, _d2;
if (!model9) if (!model9)
return null; return null;
if (skipped8 < (((_a2 = config3.face.description) == null ? void 0 : _a2.skipFrames) || 0) && (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) <= now() - lastTime4 && config3.skipFrame && lastCount3 === count2 && ((_c2 = last6[idx]) == null ? void 0 : _c2.age) && ((_d2 = last6[idx]) == null ? void 0 : _d2.age) > 0) { const skipFrame = skipped8 < (((_a2 = config3.face.description) == null ? void 0 : _a2.skipFrames) || 0);
const skipTime = (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime7;
if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c2 = last3[idx]) == null ? void 0 : _c2.age) && ((_d2 = last3[idx]) == null ? void 0 : _d2.age) > 0) {
skipped8++; skipped8++;
return last6[idx]; return last3[idx];
} }
skipped8 = 0; skipped8 = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
@ -39357,7 +39371,7 @@ async function predict7(image6, config3, idx, count2) {
if ((_a3 = config3.face.description) == null ? void 0 : _a3.enabled) { if ((_a3 = config3.face.description) == null ? void 0 : _a3.enabled) {
const enhanced = enhance(image6); const enhanced = enhance(image6);
const resT = await (model9 == null ? void 0 : model9.predict(enhanced)); const resT = await (model9 == null ? void 0 : model9.predict(enhanced));
lastTime4 = now(); lastTime7 = now();
De(enhanced); De(enhanced);
const genderT = await resT.find((t) => t.shape[1] === 1); const genderT = await resT.find((t) => t.shape[1] === 1);
const gender = await genderT.data(); const gender = await genderT.data();
@ -39377,7 +39391,7 @@ async function predict7(image6, config3, idx, count2) {
obj.descriptor = Array.from(descriptor); obj.descriptor = Array.from(descriptor);
resT.forEach((t) => De(t)); resT.forEach((t) => De(t));
} }
last6[idx] = obj; last3[idx] = obj;
lastCount3 = count2; lastCount3 = count2;
resolve(obj); resolve(obj);
}); });
@ -42524,7 +42538,7 @@ var handBoxEnlargeFactor = 1.65;
var palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2]; var palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2];
var palmLandmarksPalmBase = 0; var palmLandmarksPalmBase = 0;
var palmLandmarksMiddleFingerBase = 2; var palmLandmarksMiddleFingerBase = 2;
var lastTime5 = 0; var lastTime8 = 0;
var HandPipeline = class { var HandPipeline = class {
constructor(handDetector, handPoseModel2) { constructor(handDetector, handPoseModel2) {
__publicField(this, "handDetector"); __publicField(this, "handDetector");
@ -42537,7 +42551,7 @@ var HandPipeline = class {
this.handPoseModel = handPoseModel2; this.handPoseModel = handPoseModel2;
this.inputSize = this.handPoseModel && this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0; this.inputSize = this.handPoseModel && this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0;
this.storedBoxes = []; this.storedBoxes = [];
this.skipped = 0; this.skipped = Number.MAX_SAFE_INTEGER;
this.detectedHands = 0; this.detectedHands = 0;
} }
calculateLandmarksBoundingBox(landmarks) { calculateLandmarksBoundingBox(landmarks) {
@ -42589,11 +42603,13 @@ var HandPipeline = class {
async estimateHands(image6, config3) { async estimateHands(image6, config3) {
let useFreshBox = false; let useFreshBox = false;
let boxes; let boxes;
if (this.skipped === 0 || this.skipped > config3.hand.skipFrames && (config3.hand.skipTime || 0) <= now() - lastTime5 || !config3.hand.landmarks || !config3.skipFrame) { const skipTime = (config3.hand.skipTime || 0) > now() - lastTime8;
const skipFrame = this.skipped < (config3.hand.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
boxes = await this.handDetector.estimateHandBounds(image6, config3); boxes = await this.handDetector.estimateHandBounds(image6, config3);
this.skipped = 0; this.skipped = 0;
} }
if (config3.skipFrame) if (config3.skipAllowed)
this.skipped++; this.skipped++;
if (boxes && boxes.length > 0 && (boxes.length !== this.detectedHands && this.detectedHands !== config3.hand.maxDetected || !config3.hand.landmarks)) { if (boxes && boxes.length > 0 && (boxes.length !== this.detectedHands && this.detectedHands !== config3.hand.maxDetected || !config3.hand.landmarks)) {
this.detectedHands = 0; this.detectedHands = 0;
@ -42618,7 +42634,7 @@ var HandPipeline = class {
De(croppedInput); De(croppedInput);
De(rotatedImage); De(rotatedImage);
const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage); const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage);
lastTime5 = now(); lastTime8 = now();
De(handImage); De(handImage);
const confidence = (await confidenceT.data())[0]; const confidence = (await confidenceT.data())[0];
De(confidenceT); De(confidenceT);
@ -43186,8 +43202,8 @@ var faceIndex = 4;
var boxExpandFact = 1.6; var boxExpandFact = 1.6;
var maxDetectorResolution = 512; var maxDetectorResolution = 512;
var detectorExpandFact = 1.4; var detectorExpandFact = 1.4;
var skipped9 = 0; var skipped9 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0; var lastTime9 = 0;
var outputSize = [0, 0]; var outputSize = [0, 0];
var cache3 = { var cache3 = {
boxes: [], boxes: [],
@ -43327,17 +43343,21 @@ async function predict9(input, config3) {
return []; return [];
outputSize = [input.shape[2] || 0, input.shape[1] || 0]; outputSize = [input.shape[2] || 0, input.shape[1] || 0];
skipped9++; skipped9++;
if (config3.skipFrame && skipped9 <= (config3.hand.skipFrames || 0) && (config3.hand.skipTime || 0) <= now() - lastTime6) { const skipTime = (config3.hand.skipTime || 0) > now() - lastTime9;
const skipFrame = skipped9 < (config3.hand.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
return cache3.hands; return cache3.hands;
} }
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
if (config3.skipFrame && cache3.hands.length === config3.hand.maxDetected) { const skipTimeExtended = 3 * (config3.hand.skipTime || 0) > now() - lastTime9;
const skipFrameExtended = skipped9 < 3 * (config3.hand.skipFrames || 0);
if (config3.skipAllowed && cache3.hands.length === config3.hand.maxDetected) {
cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3))); cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
} else if (config3.skipFrame && skipped9 < 3 * (config3.hand.skipFrames || 0) && (config3.hand.skipTime || 0) <= 3 * (now() - lastTime6) && cache3.hands.length > 0) { } else if (config3.skipAllowed && skipTimeExtended && skipFrameExtended && cache3.hands.length > 0) {
cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3))); cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
} else { } else {
cache3.boxes = await detectHands(input, config3); cache3.boxes = await detectHands(input, config3);
lastTime6 = now(); lastTime9 = now();
cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3))); cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
skipped9 = 0; skipped9 = 0;
} }
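The handtrack hunks above keep two tiers of reuse: inside the normal window the cached hands are returned untouched; past that, as long as the tracker already holds `maxDetected` hands or the extended window (three times `skipTime`/`skipFrames`) has not expired, only the finger-landmark model is re-run on the cached boxes; otherwise the hand detector runs again. A condensed sketch with the detector and landmark models passed in as parameters (names illustrative):

```ts
type Box = number[];
type Hand = { keypoints: number[][] };
type HandConfig = { skipAllowed: boolean; maxDetected: number; skipTime?: number; skipFrames?: number };

const now = () => performance.now();
let skipped = Number.MAX_SAFE_INTEGER;
let lastTime = 0;
const cache: { boxes: Box[]; hands: Hand[] } = { boxes: [], hands: [] };

async function handsCached(
  detectBoxes: () => Promise<Box[]>,           // full hand detector (expensive)
  detectFingers: (box: Box) => Promise<Hand>,  // finger-landmark model on one cached box (cheaper)
  config: HandConfig,
): Promise<Hand[]> {
  skipped++;
  const skipTime = (config.skipTime || 0) > now() - lastTime;
  const skipFrame = skipped < (config.skipFrames || 0);
  if (config.skipAllowed && skipTime && skipFrame) return cache.hands; // tier 1: reuse hands as-is

  const skipTimeExtended = 3 * (config.skipTime || 0) > now() - lastTime;
  const skipFrameExtended = skipped < 3 * (config.skipFrames || 0);
  const reuseBoxes = config.skipAllowed
    && (cache.hands.length === config.maxDetected
      || (skipTimeExtended && skipFrameExtended && cache.hands.length > 0));
  if (!reuseBoxes) {                                                   // full re-detection
    cache.boxes = await detectBoxes();
    lastTime = now();
    skipped = 0;
  }
  cache.hands = await Promise.all(cache.boxes.map((box) => detectFingers(box))); // refresh landmarks
  return cache.hands;
}
```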
@ -43632,10 +43652,12 @@ async function parseMultiPose(res, config3, image6, inputBox) {
async function predict10(input, config3) { async function predict10(input, config3) {
if (!model10 || !(model10 == null ? void 0 : model10.inputs[0].shape)) if (!model10 || !(model10 == null ? void 0 : model10.inputs[0].shape))
return []; return [];
if (!config3.skipFrame) if (!config3.skipAllowed)
cache5.boxes.length = 0; cache5.boxes.length = 0;
skipped10++; skipped10++;
if (config3.skipFrame && (skipped10 <= (config3.body.skipFrames || 0) && (config3.body.skipTime || 0) <= now() - cache5.last)) { const skipTime = (config3.body.skipTime || 0) > now() - cache5.last;
const skipFrame = skipped10 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
return cache5.bodies; return cache5.bodies;
} }
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
@ -43657,8 +43679,8 @@ async function predict10(input, config3) {
// src/object/nanodet.ts // src/object/nanodet.ts
var model11; var model11;
var last7 = []; var last4 = [];
var lastTime7 = 0; var lastTime10 = 0;
var skipped11 = Number.MAX_SAFE_INTEGER; var skipped11 = Number.MAX_SAFE_INTEGER;
var scaleBox = 2.5; var scaleBox = 2.5;
async function load12(config3) { async function load12(config3) {
@ -43738,13 +43760,15 @@ async function process4(res, inputSize8, outputShape, config3) {
return results; return results;
} }
async function predict11(image6, config3) { async function predict11(image6, config3) {
if (skipped11 < (config3.object.skipFrames || 0) && (config3.object.skipTime || 0) <= now() - lastTime7 && config3.skipFrame && last7.length > 0) { const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
const skipFrame = skipped11 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last4.length > 0) {
skipped11++; skipped11++;
return last7; return last4;
} }
skipped11 = 0; skipped11 = 0;
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last7; return last4;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [image6.shape[2], image6.shape[1]]; const outputSize2 = [image6.shape[2], image6.shape[1]];
const resize = Cn.resizeBilinear(image6, [model11.inputSize, model11.inputSize], false); const resize = Cn.resizeBilinear(image6, [model11.inputSize, model11.inputSize], false);
@ -43755,10 +43779,10 @@ async function predict11(image6, config3) {
let objectT; let objectT;
if (config3.object.enabled) if (config3.object.enabled)
objectT = await model11.predict(transpose); objectT = await model11.predict(transpose);
lastTime7 = now(); lastTime10 = now();
De(transpose); De(transpose);
const obj = await process4(objectT, model11.inputSize, outputSize2, config3); const obj = await process4(objectT, model11.inputSize, outputSize2, config3);
last7 = obj; last4 = obj;
resolve(obj); resolve(obj);
}); });
} }
@ -44953,7 +44977,7 @@ async function canvas2(input, output) {
async function all(inCanvas2, result, drawOptions) { async function all(inCanvas2, result, drawOptions) {
if (!result || !result.performance || !result || !inCanvas2) if (!result || !result.performance || !result || !inCanvas2)
return null; return null;
const timestamp = now(); const timeStamp = now();
const localOptions = mergeDeep(options2, drawOptions); const localOptions = mergeDeep(options2, drawOptions);
const promise = Promise.all([ const promise = Promise.all([
face(inCanvas2, result.face, localOptions), face(inCanvas2, result.face, localOptions),
@ -44962,7 +44986,7 @@ async function all(inCanvas2, result, drawOptions) {
object(inCanvas2, result.object, localOptions), object(inCanvas2, result.object, localOptions),
gesture(inCanvas2, result.gesture, localOptions) gesture(inCanvas2, result.gesture, localOptions)
]); ]);
result.performance.draw = Math.trunc(now() - timestamp); result.performance.draw = env.perfadd ? (result.performance.draw || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
return promise; return promise;
} }
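The draw `all()` hunk above, and every `performance.*` assignment that follows, switches to the same pattern: when `env.perfadd` is enabled, phase timings accumulate across calls instead of overwriting the previous value. A one-function sketch of that accumulation (the helper name is illustrative):

```ts
// Accumulate or overwrite a phase timing depending on the perfadd flag.
function recordTiming(perf: Record<string, number>, phase: string, startedAt: number, perfadd: boolean): void {
  const elapsed = Math.trunc(performance.now() - startedAt);
  perf[phase] = perfadd ? (perf[phase] || 0) + elapsed : elapsed;
}
```

With `perfadd` enabled, a value such as `result.performance.draw` reports the total milliseconds spent in that phase since the flag was set rather than the cost of the last call.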
@ -45087,7 +45111,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:face"; parent.state = "run:face";
timeStamp = now(); timeStamp = now();
const faces = await predict6(input, parent.config); const faces = await predict6(input, parent.config);
parent.performance.face = Math.trunc(now() - timeStamp); parent.performance.face = env.perfadd ? (parent.performance.face || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (!input.shape || input.shape.length !== 4) if (!input.shape || input.shape.length !== 4)
return []; return [];
if (!faces) if (!faces)
@ -45106,7 +45130,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:emotion"; parent.state = "run:emotion";
timeStamp = now(); timeStamp = now();
emotionRes = parent.config.face.emotion.enabled ? await predict5(faces[i].tensor || Dr([]), parent.config, i, faces.length) : null; emotionRes = parent.config.face.emotion.enabled ? await predict5(faces[i].tensor || Dr([]), parent.config, i, faces.length) : null;
parent.performance.emotion = Math.trunc(now() - timeStamp); parent.performance.emotion = env.perfadd ? (parent.performance.emotion || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End Emotion:"); parent.analyze("End Emotion:");
parent.analyze("Start AntiSpoof:"); parent.analyze("Start AntiSpoof:");
@ -45116,7 +45140,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:antispoof"; parent.state = "run:antispoof";
timeStamp = now(); timeStamp = now();
antispoofRes = parent.config.face.antispoof.enabled ? await predict(faces[i].tensor || Dr([]), parent.config, i, faces.length) : null; antispoofRes = parent.config.face.antispoof.enabled ? await predict(faces[i].tensor || Dr([]), parent.config, i, faces.length) : null;
parent.performance.antispoof = Math.trunc(now() - timeStamp); parent.performance.antispoof = env.perfadd ? (parent.performance.antispoof || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End AntiSpoof:"); parent.analyze("End AntiSpoof:");
parent.analyze("Start Description:"); parent.analyze("Start Description:");
@ -45126,7 +45150,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:description"; parent.state = "run:description";
timeStamp = now(); timeStamp = now();
descRes = parent.config.face.description.enabled ? await predict7(faces[i].tensor || Dr([]), parent.config, i, faces.length) : null; descRes = parent.config.face.description.enabled ? await predict7(faces[i].tensor || Dr([]), parent.config, i, faces.length) : null;
parent.performance.embedding = Math.trunc(now() - timeStamp); parent.performance.embedding = env.perfadd ? (parent.performance.embedding || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End Description:"); parent.analyze("End Description:");
if (parent.config.async) { if (parent.config.async) {
@ -46444,7 +46468,7 @@ var Human = class {
const count2 = Object.values(this.models).filter((model14) => model14).length; const count2 = Object.values(this.models).filter((model14) => model14).length;
if (userConfig) if (userConfig)
this.config = mergeDeep(this.config, userConfig); this.config = mergeDeep(this.config, userConfig);
if (env.initial) { if (this.env.initial) {
if (this.config.debug) if (this.config.debug)
log(`version: ${this.version}`); log(`version: ${this.version}`);
if (this.config.debug) if (this.config.debug)
@ -46460,9 +46484,9 @@ var Human = class {
} }
} }
await load15(this); await load15(this);
if (env.initial && this.config.debug) if (this.env.initial && this.config.debug)
log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors"); log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors");
env.initial = false; this.env.initial = false;
const loaded = Object.values(this.models).filter((model14) => model14).length; const loaded = Object.values(this.models).filter((model14) => model14).length;
if (loaded !== count2) { if (loaded !== count2) {
await validate2(this); await validate2(this);
@ -46470,7 +46494,7 @@ var Human = class {
} }
const current = Math.trunc(now() - timeStamp); const current = Math.trunc(now() - timeStamp);
if (current > (this.performance.load || 0)) if (current > (this.performance.load || 0))
this.performance.load = current; this.performance.load = this.env.perfadd ? (this.performance.load || 0) + current : current;
} }
next(result = this.result) { next(result = this.result) {
return calc2(result, this.config); return calc2(result, this.config);
@ -46484,7 +46508,6 @@ var Human = class {
var _a2, _b, _c2, _d2, _e, _f2, _g, _h2, _i2, _j2, _k2, _l2, _m2, _n2, _o2, _p2, _q2, _r, _s2, _t2, _u2, _v2; var _a2, _b, _c2, _d2, _e, _f2, _g, _h2, _i2, _j2, _k2, _l2, _m2, _n2, _o2, _p2, _q2, _r, _s2, _t2, _u2, _v2;
this.state = "config"; this.state = "config";
let timeStamp; let timeStamp;
let elapsedTime;
this.config = mergeDeep(this.config, userConfig); this.config = mergeDeep(this.config, userConfig);
this.state = "check"; this.state = "check";
const error = __privateGet(this, _sanity).call(this, input); const error = __privateGet(this, _sanity).call(this, input);
@ -46499,7 +46522,7 @@ var Human = class {
this.state = "image"; this.state = "image";
const img = process2(input, this.config); const img = process2(input, this.config);
this.process = img; this.process = img;
this.performance.image = Math.trunc(now() - timeStamp); this.performance.image = this.env.perfadd ? (this.performance.image || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Get Image:"); this.analyze("Get Image:");
if (!img.tensor) { if (!img.tensor) {
if (this.config.debug) if (this.config.debug)
@ -46509,15 +46532,15 @@ var Human = class {
} }
this.emit("image"); this.emit("image");
timeStamp = now(); timeStamp = now();
this.config.skipFrame = await skip(this.config, img.tensor); this.config.skipAllowed = await skip(this.config, img.tensor);
if (!this.performance.frames) if (!this.performance.frames)
this.performance.frames = 0; this.performance.frames = 0;
if (!this.performance.cached) if (!this.performance.cached)
this.performance.cached = 0; this.performance.cached = 0;
this.performance.frames++; this.performance.frames++;
if (this.config.skipFrame) if (this.config.skipAllowed)
this.performance.cached++; this.performance.cached++;
this.performance.changed = Math.trunc(now() - timeStamp); this.performance.changed = this.env.perfadd ? (this.performance.changed || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Check Changed:"); this.analyze("Check Changed:");
let faceRes = []; let faceRes = [];
let bodyRes = []; let bodyRes = [];
@ -46531,9 +46554,7 @@ var Human = class {
} else { } else {
timeStamp = now(); timeStamp = now();
faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.face = this.env.perfadd ? (this.performance.face || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.face = elapsedTime;
} }
if (this.config.async && (this.config.body.maxDetected === -1 || this.config.hand.maxDetected === -1)) if (this.config.async && (this.config.body.maxDetected === -1 || this.config.hand.maxDetected === -1))
faceRes = await faceRes; faceRes = await faceRes;
@ -46561,9 +46582,7 @@ var Human = class {
bodyRes = this.config.body.enabled ? await predict4(img.tensor, bodyConfig) : []; bodyRes = this.config.body.enabled ? await predict4(img.tensor, bodyConfig) : [];
else if ((_h2 = this.config.body.modelPath) == null ? void 0 : _h2.includes("movenet")) else if ((_h2 = this.config.body.modelPath) == null ? void 0 : _h2.includes("movenet"))
bodyRes = this.config.body.enabled ? await predict10(img.tensor, bodyConfig) : []; bodyRes = this.config.body.enabled ? await predict10(img.tensor, bodyConfig) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.body = this.env.perfadd ? (this.performance.body || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.body = elapsedTime;
} }
this.analyze("End Body:"); this.analyze("End Body:");
this.analyze("Start Hand:"); this.analyze("Start Hand:");
@ -46582,9 +46601,7 @@ var Human = class {
handRes = this.config.hand.enabled ? await predict8(img.tensor, handConfig) : []; handRes = this.config.hand.enabled ? await predict8(img.tensor, handConfig) : [];
else if ((_p2 = (_o2 = this.config.hand.detector) == null ? void 0 : _o2.modelPath) == null ? void 0 : _p2.includes("handtrack")) else if ((_p2 = (_o2 = this.config.hand.detector) == null ? void 0 : _o2.modelPath) == null ? void 0 : _p2.includes("handtrack"))
handRes = this.config.hand.enabled ? await predict9(img.tensor, handConfig) : []; handRes = this.config.hand.enabled ? await predict9(img.tensor, handConfig) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.hand = this.env.perfadd ? (this.performance.hand || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.hand = elapsedTime;
} }
this.analyze("End Hand:"); this.analyze("End Hand:");
this.analyze("Start Object:"); this.analyze("Start Object:");
@ -46602,9 +46619,7 @@ var Human = class {
objectRes = this.config.object.enabled ? await predict11(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? await predict11(img.tensor, this.config) : [];
else if ((_t2 = this.config.object.modelPath) == null ? void 0 : _t2.includes("centernet")) else if ((_t2 = this.config.object.modelPath) == null ? void 0 : _t2.includes("centernet"))
objectRes = this.config.object.enabled ? await predict3(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? await predict3(img.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.object = this.env.perfadd ? (this.performance.object || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.object = elapsedTime;
} }
this.analyze("End Object:"); this.analyze("End Object:");
this.state = "detect:await"; this.state = "detect:await";
@ -46616,7 +46631,7 @@ var Human = class {
timeStamp = now(); timeStamp = now();
gestureRes = [...face2(faceRes), ...body2(bodyRes), ...hand2(handRes), ...iris3(faceRes)]; gestureRes = [...face2(faceRes), ...body2(bodyRes), ...hand2(handRes), ...iris3(faceRes)];
if (!this.config.async) if (!this.config.async)
this.performance.gesture = Math.trunc(now() - timeStamp); this.performance.gesture = this.env.perfadd ? (this.performance.gesture || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
else if (this.performance.gesture) else if (this.performance.gesture)
delete this.performance.gesture; delete this.performance.gesture;
} }

File diff suppressed because one or more lines are too long

dist/human.js vendored (270 changes)

File diff suppressed because one or more lines are too long

dist/human.node-gpu.js vendored (223 changes)

@ -152,8 +152,8 @@ var config = {
debug: true, debug: true,
async: true, async: true,
warmup: "full", warmup: "full",
cacheSensitivity: 0.75, cacheSensitivity: 0.7,
skipFrame: false, skipAllowed: false,
filter: { filter: {
enabled: true, enabled: true,
width: 0, width: 0,
@ -183,8 +183,8 @@ var config = {
modelPath: "blazeface.json", modelPath: "blazeface.json",
rotation: true, rotation: true,
maxDetected: 1, maxDetected: 1,
skipFrames: 11, skipFrames: 99,
skipTime: 2e3, skipTime: 2500,
minConfidence: 0.2, minConfidence: 0.2,
iouThreshold: 0.1, iouThreshold: 0.1,
return: false return: false
@ -200,21 +200,21 @@ var config = {
emotion: { emotion: {
enabled: true, enabled: true,
minConfidence: 0.1, minConfidence: 0.1,
skipFrames: 12, skipFrames: 99,
skipTime: 2e3, skipTime: 1500,
modelPath: "emotion.json" modelPath: "emotion.json"
}, },
description: { description: {
enabled: true, enabled: true,
modelPath: "faceres.json", modelPath: "faceres.json",
skipFrames: 13, skipFrames: 99,
skipTime: 2e3, skipTime: 3e3,
minConfidence: 0.1 minConfidence: 0.1
}, },
antispoof: { antispoof: {
enabled: false, enabled: false,
skipFrames: 14, skipFrames: 99,
skipTime: 2e3, skipTime: 4e3,
modelPath: "antispoof.json" modelPath: "antispoof.json"
} }
}, },
@ -227,12 +227,12 @@ var config = {
maxDetected: -1, maxDetected: -1,
minConfidence: 0.3, minConfidence: 0.3,
skipFrames: 1, skipFrames: 1,
skipTime: 2e3 skipTime: 200
}, },
hand: { hand: {
enabled: true, enabled: true,
rotation: true, rotation: true,
skipFrames: 2, skipFrames: 99,
skipTime: 2e3, skipTime: 2e3,
minConfidence: 0.5, minConfidence: 0.5,
iouThreshold: 0.2, iouThreshold: 0.2,
@ -251,8 +251,8 @@ var config = {
minConfidence: 0.2, minConfidence: 0.2,
iouThreshold: 0.4, iouThreshold: 0.4,
maxDetected: 10, maxDetected: 10,
skipFrames: 15, skipFrames: 99,
skipTime: 2e3 skipTime: 1e3
}, },
segmentation: { segmentation: {
enabled: false, enabled: false,
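The config hunks above move every module from a small `skipFrames` count to a large one (99) plus a per-module `skipTime` window (for example 2500 ms for the face detector, 1500 ms for emotion, and 200 ms for body), so caching becomes effectively time-bound. A hypothetical override that tightens those windows for a fast-moving feed; the values are illustrative and assume the usual pattern of passing a partial config to the `Human` constructor:

```ts
import { Human } from '@vladmandic/human';

// Partial config: any field not listed keeps the default shown in the hunk above.
const userConfig = {
  face: {
    detector: { skipTime: 1000 },  // re-run the face detector at most once per second
    emotion: { skipTime: 500 },
    description: { skipTime: 1500 },
  },
  body: { skipTime: 100 },
  hand: { skipTime: 1000 },
  object: { skipTime: 500 },
};

const human = new Human(userConfig); // merged over the defaults shown above
```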
@ -1190,6 +1190,7 @@ var Env = class {
__publicField(this, "filter"); __publicField(this, "filter");
__publicField(this, "tfjs"); __publicField(this, "tfjs");
__publicField(this, "offscreen"); __publicField(this, "offscreen");
__publicField(this, "perfadd", false);
__publicField(this, "wasm", { __publicField(this, "wasm", {
supported: void 0, supported: void 0,
backend: void 0, backend: void 0,
@ -1321,7 +1322,7 @@ var model2;
var cached = []; var cached = [];
var skipped2 = Number.MAX_SAFE_INTEGER; var skipped2 = Number.MAX_SAFE_INTEGER;
var lastCount = 0; var lastCount = 0;
var last = 0; var lastTime = 0;
async function load2(config3) { async function load2(config3) {
var _a, _b; var _a, _b;
if (env.initial) if (env.initial)
@ -1340,7 +1341,9 @@ async function predict(image25, config3, idx, count2) {
var _a, _b; var _a, _b;
if (!model2) if (!model2)
return null; return null;
if (skipped2 < (((_a = config3.face.antispoof) == null ? void 0 : _a.skipFrames) || 0) && (((_b = config3.face.antispoof) == null ? void 0 : _b.skipTime) || 0) <= now() - last && config3.skipFrame && lastCount === count2 && cached[idx]) { const skipTime = (((_a = config3.face.antispoof) == null ? void 0 : _a.skipTime) || 0) > now() - lastTime;
const skipFrame = skipped2 < (((_b = config3.face.antispoof) == null ? void 0 : _b.skipFrames) || 0);
if (config3.skipAllowed && skipTime && skipFrame && lastCount === count2 && cached[idx]) {
skipped2++; skipped2++;
return cached[idx]; return cached[idx];
} }
@ -1351,7 +1354,7 @@ async function predict(image25, config3, idx, count2) {
const num = (await res.data())[0]; const num = (await res.data())[0];
cached[idx] = Math.round(100 * num) / 100; cached[idx] = Math.round(100 * num) / 100;
lastCount = count2; lastCount = count2;
last = now(); lastTime = now();
tf4.dispose([resize, res]); tf4.dispose([resize, res]);
resolve(cached[idx]); resolve(cached[idx]);
}); });
@ -4936,7 +4939,7 @@ var skipped3 = Number.MAX_SAFE_INTEGER;
var outputNodes; var outputNodes;
var cache = null; var cache = null;
var padding = [[0, 0], [0, 0], [0, 0], [0, 0]]; var padding = [[0, 0], [0, 0], [0, 0], [0, 0]];
var last2 = 0; var lastTime2 = 0;
async function loadDetect(config3) { async function loadDetect(config3) {
var _a, _b, _c; var _a, _b, _c;
if (env3.initial) if (env3.initial)
@ -5053,11 +5056,13 @@ async function detectParts(input, config3, outputSize2) {
} }
async function predict2(input, config3) { async function predict2(input, config3) {
const outputSize2 = [input.shape[2] || 0, input.shape[1] || 0]; const outputSize2 = [input.shape[2] || 0, input.shape[1] || 0];
if (skipped3 < (config3.body.skipFrames || 0) && (config3.body.skipTime || 0) <= now() - last2 && config3.skipFrame && cache !== null) { const skipTime = (config3.body.skipTime || 0) > now() - lastTime2;
const skipFrame = skipped3 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && cache !== null) {
skipped3++; skipped3++;
} else { } else {
cache = await detectParts(input, config3, outputSize2); cache = await detectParts(input, config3, outputSize2);
last2 = now(); lastTime2 = now();
skipped3 = 0; skipped3 = 0;
} }
if (cache) if (cache)
@ -5155,8 +5160,8 @@ var labels = [
// src/object/centernet.ts // src/object/centernet.ts
var model4; var model4;
var inputSize3 = 0; var inputSize3 = 0;
var last3 = []; var last = [];
var lastTime = 0; var lastTime3 = 0;
var skipped4 = Number.MAX_SAFE_INTEGER; var skipped4 = Number.MAX_SAFE_INTEGER;
async function load4(config3) { async function load4(config3) {
if (env.initial) if (env.initial)
@ -5221,21 +5226,23 @@ async function process3(res, outputShape, config3) {
return results; return results;
} }
async function predict3(input, config3) { async function predict3(input, config3) {
if (skipped4 < (config3.object.skipFrames || 0) && (config3.object.skipTime || 0) <= now() - lastTime && config3.skipFrame && last3.length > 0) { const skipTime = (config3.object.skipTime || 0) > now() - lastTime3;
const skipFrame = skipped4 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last.length > 0) {
skipped4++; skipped4++;
return last3; return last;
} }
skipped4 = 0; skipped4 = 0;
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last3; return last;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [input.shape[2], input.shape[1]]; const outputSize2 = [input.shape[2], input.shape[1]];
const resize = tf8.image.resizeBilinear(input, [inputSize3, inputSize3]); const resize = tf8.image.resizeBilinear(input, [inputSize3, inputSize3]);
const objectT = config3.object.enabled ? model4 == null ? void 0 : model4.execute(resize, ["tower_0/detections"]) : null; const objectT = config3.object.enabled ? model4 == null ? void 0 : model4.execute(resize, ["tower_0/detections"]) : null;
lastTime = now(); lastTime3 = now();
tf8.dispose(resize); tf8.dispose(resize);
const obj = await process3(objectT, outputSize2, config3); const obj = await process3(objectT, outputSize2, config3);
last3 = obj; last = obj;
resolve(obj); resolve(obj);
}); });
} }
@ -5278,7 +5285,7 @@ var connected2 = {
// src/body/efficientpose.ts // src/body/efficientpose.ts
var model5; var model5;
var last4 = 0; var lastTime4 = 0;
var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} }; var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} };
var skipped5 = Number.MAX_SAFE_INTEGER; var skipped5 = Number.MAX_SAFE_INTEGER;
async function load5(config3) { async function load5(config3) {
@ -5310,14 +5317,15 @@ function max2d(inputs, minScore) {
}); });
} }
async function predict4(image25, config3) { async function predict4(image25, config3) {
var _a; const skipTime = (config3.body.skipTime || 0) > now() - lastTime4;
if (skipped5 < (((_a = config3.body) == null ? void 0 : _a.skipFrames) || 0) && config3.skipFrame && Object.keys(cache2.keypoints).length > 0 && (config3.body.skipTime || 0) <= now() - last4) { const skipFrame = skipped5 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && Object.keys(cache2.keypoints).length > 0) {
skipped5++; skipped5++;
return [cache2]; return [cache2];
} }
skipped5 = 0; skipped5 = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
var _a2; var _a;
const tensor3 = tf9.tidy(() => { const tensor3 = tf9.tidy(() => {
if (!(model5 == null ? void 0 : model5.inputs[0].shape)) if (!(model5 == null ? void 0 : model5.inputs[0].shape))
return null; return null;
@ -5329,7 +5337,7 @@ async function predict4(image25, config3) {
let resT; let resT;
if (config3.body.enabled) if (config3.body.enabled)
resT = await (model5 == null ? void 0 : model5.predict(tensor3)); resT = await (model5 == null ? void 0 : model5.predict(tensor3));
last4 = now(); lastTime4 = now();
tf9.dispose(tensor3); tf9.dispose(tensor3);
if (resT) { if (resT) {
cache2.keypoints.length = 0; cache2.keypoints.length = 0;
@ -5339,7 +5347,7 @@ async function predict4(image25, config3) {
tf9.dispose(squeeze8); tf9.dispose(squeeze8);
for (let id = 0; id < stack3.length; id++) { for (let id = 0; id < stack3.length; id++) {
const [x2, y2, partScore] = max2d(stack3[id], config3.body.minConfidence); const [x2, y2, partScore] = max2d(stack3[id], config3.body.minConfidence);
if (partScore > (((_a2 = config3.body) == null ? void 0 : _a2.minConfidence) || 0)) { if (partScore > (((_a = config3.body) == null ? void 0 : _a.minConfidence) || 0)) {
cache2.keypoints.push({ cache2.keypoints.push({
score: Math.round(100 * partScore) / 100, score: Math.round(100 * partScore) / 100,
part: kpt2[id], part: kpt2[id],
@ -5391,9 +5399,9 @@ async function predict4(image25, config3) {
var tf10 = __toModule(require_tfjs_esm()); var tf10 = __toModule(require_tfjs_esm());
var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"]; var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"];
var model6; var model6;
var last5 = []; var last2 = [];
var lastCount2 = 0; var lastCount2 = 0;
var lastTime2 = 0; var lastTime5 = 0;
var skipped6 = Number.MAX_SAFE_INTEGER; var skipped6 = Number.MAX_SAFE_INTEGER;
var rgb = [0.2989, 0.587, 0.114]; var rgb = [0.2989, 0.587, 0.114];
async function load6(config3) { async function load6(config3) {
@ -5414,9 +5422,11 @@ async function predict5(image25, config3, idx, count2) {
var _a, _b; var _a, _b;
if (!model6) if (!model6)
return null; return null;
if (skipped6 < (((_a = config3.face.emotion) == null ? void 0 : _a.skipFrames) || 0) && (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) <= now() - lastTime2 && config3.skipFrame && lastCount2 === count2 && last5[idx] && last5[idx].length > 0) { const skipFrame = skipped6 < (((_a = config3.face.emotion) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime5;
if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last2[idx] && last2[idx].length > 0) {
skipped6++; skipped6++;
return last5[idx]; return last2[idx];
} }
skipped6 = 0; skipped6 = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
@ -5439,7 +5449,7 @@ async function predict5(image25, config3, idx, count2) {
const normalize = tf10.tidy(() => tf10.mul(tf10.sub(grayscale, 0.5), 2)); const normalize = tf10.tidy(() => tf10.mul(tf10.sub(grayscale, 0.5), 2));
tf10.dispose(grayscale); tf10.dispose(grayscale);
const emotionT = await (model6 == null ? void 0 : model6.predict(normalize)); const emotionT = await (model6 == null ? void 0 : model6.predict(normalize));
lastTime2 = now(); lastTime5 = now();
const data = await emotionT.data(); const data = await emotionT.data();
tf10.dispose(emotionT); tf10.dispose(emotionT);
for (let i = 0; i < data.length; i++) { for (let i = 0; i < data.length; i++) {
@ -5449,7 +5459,7 @@ async function predict5(image25, config3, idx, count2) {
obj.sort((a, b) => b.score - a.score); obj.sort((a, b) => b.score - a.score);
tf10.dispose(normalize); tf10.dispose(normalize);
} }
last5[idx] = obj; last2[idx] = obj;
lastCount2 = count2; lastCount2 = count2;
resolve(obj); resolve(obj);
}); });
@ -5596,13 +5606,15 @@ var boxCache = [];
var model8 = null; var model8 = null;
var inputSize5 = 0; var inputSize5 = 0;
var skipped7 = Number.MAX_SAFE_INTEGER; var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime3 = 0; var lastTime6 = 0;
var detectedFaces = 0; var detectedFaces = 0;
async function predict6(input, config3) { async function predict6(input, config3) {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
if (!config3.skipFrame || (detectedFaces !== ((_a = config3.face.detector) == null ? void 0 : _a.maxDetected) || !((_b = config3.face.mesh) == null ? void 0 : _b.enabled)) && (skipped7 > (((_c = config3.face.detector) == null ? void 0 : _c.skipFrames) || 0) && (((_d = config3.face.description) == null ? void 0 : _d.skipTime) || 0) <= now() - lastTime3)) { const skipTime = (((_a = config3.face.detector) == null ? void 0 : _a.skipTime) || 0) > now() - lastTime6;
const skipFrame = skipped7 < (((_b = config3.face.detector) == null ? void 0 : _b.skipFrames) || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || detectedFaces === 0) {
const newBoxes2 = await getBoxes(input, config3); const newBoxes2 = await getBoxes(input, config3);
lastTime3 = now(); lastTime6 = now();
boxCache = []; boxCache = [];
for (const possible of newBoxes2.boxes) { for (const possible of newBoxes2.boxes) {
const startPoint = await possible.box.startPoint.data(); const startPoint = await possible.box.startPoint.data();
@ -5638,16 +5650,16 @@ async function predict6(input, config3) {
faceScore: 0, faceScore: 0,
annotations: {} annotations: {}
}; };
if (((_e = config3.face.detector) == null ? void 0 : _e.rotation) && ((_f = config3.face.mesh) == null ? void 0 : _f.enabled) && env.kernels.includes("rotatewithoffset")) { if (((_c = config3.face.detector) == null ? void 0 : _c.rotation) && ((_d = config3.face.mesh) == null ? void 0 : _d.enabled) && env.kernels.includes("rotatewithoffset")) {
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} else { } else {
rotationMatrix = IDENTITY_MATRIX; rotationMatrix = IDENTITY_MATRIX;
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input, ((_g = config3.face.mesh) == null ? void 0 : _g.enabled) ? [inputSize5, inputSize5] : [size(), size()]); const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input, ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) ? [inputSize5, inputSize5] : [size(), size()]);
face5.tensor = tf12.div(cut, 255); face5.tensor = tf12.div(cut, 255);
tf12.dispose(cut); tf12.dispose(cut);
} }
face5.boxScore = Math.round(100 * box4.confidence) / 100; face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!((_h = config3.face.mesh) == null ? void 0 : _h.enabled)) { if (!((_f = config3.face.mesh) == null ? void 0 : _f.enabled)) {
face5.box = getClampedBox(box4, input); face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input); face5.boxRaw = getRawBox(box4, input);
face5.score = Math.round(100 * box4.confidence || 0) / 100; face5.score = Math.round(100 * box4.confidence || 0) / 100;
@ -5670,17 +5682,17 @@ async function predict6(input, config3) {
let rawCoords = await coordsReshaped.array(); let rawCoords = await coordsReshaped.array();
tf12.dispose(contourCoords); tf12.dispose(contourCoords);
tf12.dispose(coordsReshaped); tf12.dispose(coordsReshaped);
if (faceConfidence < (((_i = config3.face.detector) == null ? void 0 : _i.minConfidence) || 1)) { if (faceConfidence < (((_g = config3.face.detector) == null ? void 0 : _g.minConfidence) || 1)) {
box4.confidence = faceConfidence; box4.confidence = faceConfidence;
} else { } else {
if ((_j = config3.face.iris) == null ? void 0 : _j.enabled) if ((_h = config3.face.iris) == null ? void 0 : _h.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5); rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5); face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]); face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence }; box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence };
for (const key of Object.keys(meshAnnotations)) for (const key of Object.keys(meshAnnotations))
face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]); face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]);
if (((_k = config3.face.detector) == null ? void 0 : _k.rotation) && config3.face.mesh.enabled && ((_l = config3.face.description) == null ? void 0 : _l.enabled) && env.kernels.includes("rotatewithoffset")) { if (((_i = config3.face.detector) == null ? void 0 : _i.rotation) && config3.face.mesh.enabled && ((_j = config3.face.description) == null ? void 0 : _j.enabled) && env.kernels.includes("rotatewithoffset")) {
tf12.dispose(face5.tensor); tf12.dispose(face5.tensor);
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} }
@ -5694,7 +5706,7 @@ async function predict6(input, config3) {
faces.push(face5); faces.push(face5);
newBoxes.push(box4); newBoxes.push(box4);
} }
if ((_m = config3.face.mesh) == null ? void 0 : _m.enabled) if ((_k = config3.face.mesh) == null ? void 0 : _k.enabled)
boxCache = newBoxes.filter((a) => { boxCache = newBoxes.filter((a) => {
var _a2; var _a2;
return a.confidence > (((_a2 = config3.face.detector) == null ? void 0 : _a2.minConfidence) || 0); return a.confidence > (((_a2 = config3.face.detector) == null ? void 0 : _a2.minConfidence) || 0);
@ -5725,8 +5737,8 @@ var uvmap = UV468;
// src/face/faceres.ts // src/face/faceres.ts
var tf13 = __toModule(require_tfjs_esm()); var tf13 = __toModule(require_tfjs_esm());
var model9; var model9;
var last6 = []; var last3 = [];
var lastTime4 = 0; var lastTime7 = 0;
var lastCount3 = 0; var lastCount3 = 0;
var skipped8 = Number.MAX_SAFE_INTEGER; var skipped8 = Number.MAX_SAFE_INTEGER;
async function load9(config3) { async function load9(config3) {
@ -5762,9 +5774,11 @@ async function predict7(image25, config3, idx, count2) {
var _a, _b, _c, _d; var _a, _b, _c, _d;
if (!model9) if (!model9)
return null; return null;
if (skipped8 < (((_a = config3.face.description) == null ? void 0 : _a.skipFrames) || 0) && (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) <= now() - lastTime4 && config3.skipFrame && lastCount3 === count2 && ((_c = last6[idx]) == null ? void 0 : _c.age) && ((_d = last6[idx]) == null ? void 0 : _d.age) > 0) { const skipFrame = skipped8 < (((_a = config3.face.description) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime7;
if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c = last3[idx]) == null ? void 0 : _c.age) && ((_d = last3[idx]) == null ? void 0 : _d.age) > 0) {
skipped8++; skipped8++;
return last6[idx]; return last3[idx];
} }
skipped8 = 0; skipped8 = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
@ -5778,7 +5792,7 @@ async function predict7(image25, config3, idx, count2) {
if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) { if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) {
const enhanced = enhance(image25); const enhanced = enhance(image25);
const resT = await (model9 == null ? void 0 : model9.predict(enhanced)); const resT = await (model9 == null ? void 0 : model9.predict(enhanced));
lastTime4 = now(); lastTime7 = now();
tf13.dispose(enhanced); tf13.dispose(enhanced);
const genderT = await resT.find((t) => t.shape[1] === 1); const genderT = await resT.find((t) => t.shape[1] === 1);
const gender = await genderT.data(); const gender = await genderT.data();
@ -5798,7 +5812,7 @@ async function predict7(image25, config3, idx, count2) {
obj.descriptor = Array.from(descriptor); obj.descriptor = Array.from(descriptor);
resT.forEach((t) => tf13.dispose(t)); resT.forEach((t) => tf13.dispose(t));
} }
last6[idx] = obj; last3[idx] = obj;
lastCount3 = count2; lastCount3 = count2;
resolve(obj); resolve(obj);
}); });
@ -8953,7 +8967,7 @@ var handBoxEnlargeFactor = 1.65;
var palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2]; var palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2];
var palmLandmarksPalmBase = 0; var palmLandmarksPalmBase = 0;
var palmLandmarksMiddleFingerBase = 2; var palmLandmarksMiddleFingerBase = 2;
var lastTime5 = 0; var lastTime8 = 0;
var HandPipeline = class { var HandPipeline = class {
constructor(handDetector, handPoseModel2) { constructor(handDetector, handPoseModel2) {
__publicField(this, "handDetector"); __publicField(this, "handDetector");
@ -8966,7 +8980,7 @@ var HandPipeline = class {
this.handPoseModel = handPoseModel2; this.handPoseModel = handPoseModel2;
this.inputSize = this.handPoseModel && this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0; this.inputSize = this.handPoseModel && this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0;
this.storedBoxes = []; this.storedBoxes = [];
this.skipped = 0; this.skipped = Number.MAX_SAFE_INTEGER;
this.detectedHands = 0; this.detectedHands = 0;
} }
calculateLandmarksBoundingBox(landmarks) { calculateLandmarksBoundingBox(landmarks) {
@ -9018,11 +9032,13 @@ var HandPipeline = class {
async estimateHands(image25, config3) { async estimateHands(image25, config3) {
let useFreshBox = false; let useFreshBox = false;
let boxes; let boxes;
if (this.skipped === 0 || this.skipped > config3.hand.skipFrames && (config3.hand.skipTime || 0) <= now() - lastTime5 || !config3.hand.landmarks || !config3.skipFrame) { const skipTime = (config3.hand.skipTime || 0) > now() - lastTime8;
const skipFrame = this.skipped < (config3.hand.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
boxes = await this.handDetector.estimateHandBounds(image25, config3); boxes = await this.handDetector.estimateHandBounds(image25, config3);
this.skipped = 0; this.skipped = 0;
} }
if (config3.skipFrame) if (config3.skipAllowed)
this.skipped++; this.skipped++;
if (boxes && boxes.length > 0 && (boxes.length !== this.detectedHands && this.detectedHands !== config3.hand.maxDetected || !config3.hand.landmarks)) { if (boxes && boxes.length > 0 && (boxes.length !== this.detectedHands && this.detectedHands !== config3.hand.maxDetected || !config3.hand.landmarks)) {
this.detectedHands = 0; this.detectedHands = 0;
@ -9047,7 +9063,7 @@ var HandPipeline = class {
tf16.dispose(croppedInput); tf16.dispose(croppedInput);
tf16.dispose(rotatedImage); tf16.dispose(rotatedImage);
const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage); const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage);
lastTime5 = now(); lastTime8 = now();
tf16.dispose(handImage); tf16.dispose(handImage);
const confidence = (await confidenceT.data())[0]; const confidence = (await confidenceT.data())[0];
tf16.dispose(confidenceT); tf16.dispose(confidenceT);
@ -9616,8 +9632,8 @@ var faceIndex = 4;
var boxExpandFact = 1.6; var boxExpandFact = 1.6;
var maxDetectorResolution = 512; var maxDetectorResolution = 512;
var detectorExpandFact = 1.4; var detectorExpandFact = 1.4;
var skipped9 = 0; var skipped9 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0; var lastTime9 = 0;
var outputSize = [0, 0]; var outputSize = [0, 0];
var cache3 = { var cache3 = {
boxes: [], boxes: [],
@ -9757,17 +9773,21 @@ async function predict9(input, config3) {
return []; return [];
outputSize = [input.shape[2] || 0, input.shape[1] || 0]; outputSize = [input.shape[2] || 0, input.shape[1] || 0];
skipped9++; skipped9++;
if (config3.skipFrame && skipped9 <= (config3.hand.skipFrames || 0) && (config3.hand.skipTime || 0) <= now() - lastTime6) { const skipTime = (config3.hand.skipTime || 0) > now() - lastTime9;
const skipFrame = skipped9 < (config3.hand.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
return cache3.hands; return cache3.hands;
} }
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
if (config3.skipFrame && cache3.hands.length === config3.hand.maxDetected) { const skipTimeExtended = 3 * (config3.hand.skipTime || 0) > now() - lastTime9;
const skipFrameExtended = skipped9 < 3 * (config3.hand.skipFrames || 0);
if (config3.skipAllowed && cache3.hands.length === config3.hand.maxDetected) {
cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3))); cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
} else if (config3.skipFrame && skipped9 < 3 * (config3.hand.skipFrames || 0) && (config3.hand.skipTime || 0) <= 3 * (now() - lastTime6) && cache3.hands.length > 0) { } else if (config3.skipAllowed && skipTimeExtended && skipFrameExtended && cache3.hands.length > 0) {
cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3))); cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
} else { } else {
cache3.boxes = await detectHands(input, config3); cache3.boxes = await detectHands(input, config3);
lastTime6 = now(); lastTime9 = now();
cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3))); cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
skipped9 = 0; skipped9 = 0;
} }
@ -10066,10 +10086,12 @@ async function parseMultiPose(res, config3, image25, inputBox) {
async function predict10(input, config3) { async function predict10(input, config3) {
if (!model10 || !(model10 == null ? void 0 : model10.inputs[0].shape)) if (!model10 || !(model10 == null ? void 0 : model10.inputs[0].shape))
return []; return [];
if (!config3.skipFrame) if (!config3.skipAllowed)
cache5.boxes.length = 0; cache5.boxes.length = 0;
skipped10++; skipped10++;
if (config3.skipFrame && (skipped10 <= (config3.body.skipFrames || 0) && (config3.body.skipTime || 0) <= now() - cache5.last)) { const skipTime = (config3.body.skipTime || 0) > now() - cache5.last;
const skipFrame = skipped10 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
return cache5.bodies; return cache5.bodies;
} }
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
@ -10092,8 +10114,8 @@ async function predict10(input, config3) {
// src/object/nanodet.ts // src/object/nanodet.ts
var tf21 = __toModule(require_tfjs_esm()); var tf21 = __toModule(require_tfjs_esm());
var model11; var model11;
var last7 = []; var last4 = [];
var lastTime7 = 0; var lastTime10 = 0;
var skipped11 = Number.MAX_SAFE_INTEGER; var skipped11 = Number.MAX_SAFE_INTEGER;
var scaleBox = 2.5; var scaleBox = 2.5;
async function load12(config3) { async function load12(config3) {
@ -10173,13 +10195,15 @@ async function process4(res, inputSize8, outputShape, config3) {
return results; return results;
} }
async function predict11(image25, config3) { async function predict11(image25, config3) {
if (skipped11 < (config3.object.skipFrames || 0) && (config3.object.skipTime || 0) <= now() - lastTime7 && config3.skipFrame && last7.length > 0) { const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
const skipFrame = skipped11 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last4.length > 0) {
skipped11++; skipped11++;
return last7; return last4;
} }
skipped11 = 0; skipped11 = 0;
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last7; return last4;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [image25.shape[2], image25.shape[1]]; const outputSize2 = [image25.shape[2], image25.shape[1]];
const resize = tf21.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false); const resize = tf21.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false);
@ -10190,10 +10214,10 @@ async function predict11(image25, config3) {
let objectT; let objectT;
if (config3.object.enabled) if (config3.object.enabled)
objectT = await model11.predict(transpose); objectT = await model11.predict(transpose);
lastTime7 = now(); lastTime10 = now();
tf21.dispose(transpose); tf21.dispose(transpose);
const obj = await process4(objectT, model11.inputSize, outputSize2, config3); const obj = await process4(objectT, model11.inputSize, outputSize2, config3);
last7 = obj; last4 = obj;
resolve(obj); resolve(obj);
}); });
} }
@ -11393,7 +11417,7 @@ async function canvas2(input, output) {
async function all(inCanvas2, result, drawOptions) { async function all(inCanvas2, result, drawOptions) {
if (!result || !result.performance || !result || !inCanvas2) if (!result || !result.performance || !result || !inCanvas2)
return null; return null;
const timestamp = now(); const timeStamp = now();
const localOptions = mergeDeep(options2, drawOptions); const localOptions = mergeDeep(options2, drawOptions);
const promise = Promise.all([ const promise = Promise.all([
face(inCanvas2, result.face, localOptions), face(inCanvas2, result.face, localOptions),
@ -11402,7 +11426,7 @@ async function all(inCanvas2, result, drawOptions) {
object(inCanvas2, result.object, localOptions), object(inCanvas2, result.object, localOptions),
gesture(inCanvas2, result.gesture, localOptions) gesture(inCanvas2, result.gesture, localOptions)
]); ]);
result.performance.draw = Math.trunc(now() - timestamp); result.performance.draw = env.perfadd ? (result.performance.draw || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
return promise; return promise;
} }
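
Every timing entry in the bundle now goes through the same expression: when `env.perfadd` is set, the measured interval is added to the previous value instead of replacing it, so `performance` numbers accumulate across calls until the caller resets them. A hedged sketch of that accounting (the helper name is illustrative, not part of the library):

// Cumulative vs. instantaneous timing, mirroring the env.perfadd branch above.
const now = () => performance.now();
function recordTiming(perf, key, timeStamp, perfadd) {
  const elapsed = Math.trunc(now() - timeStamp);
  perf[key] = perfadd ? (perf[key] || 0) + elapsed : elapsed;
}
// usage sketch: set human.env.perfadd = true, then result.performance.draw keeps growing across frames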
@ -11530,7 +11554,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:face"; parent.state = "run:face";
timeStamp = now(); timeStamp = now();
const faces = await predict6(input, parent.config); const faces = await predict6(input, parent.config);
parent.performance.face = Math.trunc(now() - timeStamp); parent.performance.face = env.perfadd ? (parent.performance.face || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (!input.shape || input.shape.length !== 4) if (!input.shape || input.shape.length !== 4)
return []; return [];
if (!faces) if (!faces)
@ -11549,7 +11573,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:emotion"; parent.state = "run:emotion";
timeStamp = now(); timeStamp = now();
emotionRes = parent.config.face.emotion.enabled ? await predict5(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null; emotionRes = parent.config.face.emotion.enabled ? await predict5(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null;
parent.performance.emotion = Math.trunc(now() - timeStamp); parent.performance.emotion = env.perfadd ? (parent.performance.emotion || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End Emotion:"); parent.analyze("End Emotion:");
parent.analyze("Start AntiSpoof:"); parent.analyze("Start AntiSpoof:");
@ -11559,7 +11583,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:antispoof"; parent.state = "run:antispoof";
timeStamp = now(); timeStamp = now();
antispoofRes = parent.config.face.antispoof.enabled ? await predict(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null; antispoofRes = parent.config.face.antispoof.enabled ? await predict(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null;
parent.performance.antispoof = Math.trunc(now() - timeStamp); parent.performance.antispoof = env.perfadd ? (parent.performance.antispoof || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End AntiSpoof:"); parent.analyze("End AntiSpoof:");
parent.analyze("Start Description:"); parent.analyze("Start Description:");
@ -11569,7 +11593,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:description"; parent.state = "run:description";
timeStamp = now(); timeStamp = now();
descRes = parent.config.face.description.enabled ? await predict7(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null; descRes = parent.config.face.description.enabled ? await predict7(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null;
parent.performance.embedding = Math.trunc(now() - timeStamp); parent.performance.embedding = env.perfadd ? (parent.performance.embedding || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End Description:"); parent.analyze("End Description:");
if (parent.config.async) { if (parent.config.async) {
@ -12888,7 +12912,7 @@ var Human = class {
const count2 = Object.values(this.models).filter((model14) => model14).length; const count2 = Object.values(this.models).filter((model14) => model14).length;
if (userConfig) if (userConfig)
this.config = mergeDeep(this.config, userConfig); this.config = mergeDeep(this.config, userConfig);
if (env.initial) { if (this.env.initial) {
if (this.config.debug) if (this.config.debug)
log(`version: ${this.version}`); log(`version: ${this.version}`);
if (this.config.debug) if (this.config.debug)
@ -12904,9 +12928,9 @@ var Human = class {
} }
} }
await load15(this); await load15(this);
if (env.initial && this.config.debug) if (this.env.initial && this.config.debug)
log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors"); log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors");
env.initial = false; this.env.initial = false;
const loaded = Object.values(this.models).filter((model14) => model14).length; const loaded = Object.values(this.models).filter((model14) => model14).length;
if (loaded !== count2) { if (loaded !== count2) {
await validate2(this); await validate2(this);
@ -12914,7 +12938,7 @@ var Human = class {
} }
const current = Math.trunc(now() - timeStamp); const current = Math.trunc(now() - timeStamp);
if (current > (this.performance.load || 0)) if (current > (this.performance.load || 0))
this.performance.load = current; this.performance.load = this.env.perfadd ? (this.performance.load || 0) + current : current;
} }
next(result = this.result) { next(result = this.result) {
return calc2(result, this.config); return calc2(result, this.config);
@ -12928,7 +12952,6 @@ var Human = class {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
this.state = "config"; this.state = "config";
let timeStamp; let timeStamp;
let elapsedTime;
this.config = mergeDeep(this.config, userConfig); this.config = mergeDeep(this.config, userConfig);
this.state = "check"; this.state = "check";
const error = __privateGet(this, _sanity).call(this, input); const error = __privateGet(this, _sanity).call(this, input);
@ -12943,7 +12966,7 @@ var Human = class {
this.state = "image"; this.state = "image";
const img = process2(input, this.config); const img = process2(input, this.config);
this.process = img; this.process = img;
this.performance.image = Math.trunc(now() - timeStamp); this.performance.image = this.env.perfadd ? (this.performance.image || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Get Image:"); this.analyze("Get Image:");
if (!img.tensor) { if (!img.tensor) {
if (this.config.debug) if (this.config.debug)
@ -12953,15 +12976,15 @@ var Human = class {
} }
this.emit("image"); this.emit("image");
timeStamp = now(); timeStamp = now();
this.config.skipFrame = await skip(this.config, img.tensor); this.config.skipAllowed = await skip(this.config, img.tensor);
if (!this.performance.frames) if (!this.performance.frames)
this.performance.frames = 0; this.performance.frames = 0;
if (!this.performance.cached) if (!this.performance.cached)
this.performance.cached = 0; this.performance.cached = 0;
this.performance.frames++; this.performance.frames++;
if (this.config.skipFrame) if (this.config.skipAllowed)
this.performance.cached++; this.performance.cached++;
this.performance.changed = Math.trunc(now() - timeStamp); this.performance.changed = this.env.perfadd ? (this.performance.changed || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Check Changed:"); this.analyze("Check Changed:");
let faceRes = []; let faceRes = [];
let bodyRes = []; let bodyRes = [];
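
`skip()` decides once per input frame whether caching is allowed at all, by comparing a downscaled copy of the frame against the previous one and checking the relative difference against `cacheSensitivity`; the `frames` and `cached` counters updated here expose how often that check succeeds. A small usage sketch for reading the resulting hit rate back (the counter names are taken from the code above; how a caller consumes them is an assumption):

// Approximate cache hit rate from the counters maintained in detect().
function cacheHitRate(perf) {
  const frames = perf.frames || 0;   // frames analyzed
  const cached = perf.cached || 0;   // frames where skipAllowed was granted
  return frames > 0 ? cached / frames : 0;
}
// e.g. after a few detect() calls: console.log('cache hit rate', cacheHitRate(human.performance));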
@ -12975,9 +12998,7 @@ var Human = class {
} else { } else {
timeStamp = now(); timeStamp = now();
faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.face = this.env.perfadd ? (this.performance.face || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.face = elapsedTime;
} }
if (this.config.async && (this.config.body.maxDetected === -1 || this.config.hand.maxDetected === -1)) if (this.config.async && (this.config.body.maxDetected === -1 || this.config.hand.maxDetected === -1))
faceRes = await faceRes; faceRes = await faceRes;
@ -13005,9 +13026,7 @@ var Human = class {
bodyRes = this.config.body.enabled ? await predict4(img.tensor, bodyConfig) : []; bodyRes = this.config.body.enabled ? await predict4(img.tensor, bodyConfig) : [];
else if ((_h = this.config.body.modelPath) == null ? void 0 : _h.includes("movenet")) else if ((_h = this.config.body.modelPath) == null ? void 0 : _h.includes("movenet"))
bodyRes = this.config.body.enabled ? await predict10(img.tensor, bodyConfig) : []; bodyRes = this.config.body.enabled ? await predict10(img.tensor, bodyConfig) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.body = this.env.perfadd ? (this.performance.body || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.body = elapsedTime;
} }
this.analyze("End Body:"); this.analyze("End Body:");
this.analyze("Start Hand:"); this.analyze("Start Hand:");
@ -13026,9 +13045,7 @@ var Human = class {
handRes = this.config.hand.enabled ? await predict8(img.tensor, handConfig) : []; handRes = this.config.hand.enabled ? await predict8(img.tensor, handConfig) : [];
else if ((_p = (_o = this.config.hand.detector) == null ? void 0 : _o.modelPath) == null ? void 0 : _p.includes("handtrack")) else if ((_p = (_o = this.config.hand.detector) == null ? void 0 : _o.modelPath) == null ? void 0 : _p.includes("handtrack"))
handRes = this.config.hand.enabled ? await predict9(img.tensor, handConfig) : []; handRes = this.config.hand.enabled ? await predict9(img.tensor, handConfig) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.hand = this.env.perfadd ? (this.performance.hand || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.hand = elapsedTime;
} }
this.analyze("End Hand:"); this.analyze("End Hand:");
this.analyze("Start Object:"); this.analyze("Start Object:");
@ -13046,9 +13063,7 @@ var Human = class {
objectRes = this.config.object.enabled ? await predict11(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? await predict11(img.tensor, this.config) : [];
else if ((_t = this.config.object.modelPath) == null ? void 0 : _t.includes("centernet")) else if ((_t = this.config.object.modelPath) == null ? void 0 : _t.includes("centernet"))
objectRes = this.config.object.enabled ? await predict3(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? await predict3(img.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.object = this.env.perfadd ? (this.performance.object || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.object = elapsedTime;
} }
this.analyze("End Object:"); this.analyze("End Object:");
this.state = "detect:await"; this.state = "detect:await";
@ -13060,7 +13075,7 @@ var Human = class {
timeStamp = now(); timeStamp = now();
gestureRes = [...face2(faceRes), ...body2(bodyRes), ...hand2(handRes), ...iris3(faceRes)]; gestureRes = [...face2(faceRes), ...body2(bodyRes), ...hand2(handRes), ...iris3(faceRes)];
if (!this.config.async) if (!this.config.async)
this.performance.gesture = Math.trunc(now() - timeStamp); this.performance.gesture = this.env.perfadd ? (this.performance.gesture || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
else if (this.performance.gesture) else if (this.performance.gesture)
delete this.performance.gesture; delete this.performance.gesture;
} }


@ -153,8 +153,8 @@ var config = {
debug: true, debug: true,
async: true, async: true,
warmup: "full", warmup: "full",
cacheSensitivity: 0.75, cacheSensitivity: 0.7,
skipFrame: false, skipAllowed: false,
filter: { filter: {
enabled: true, enabled: true,
width: 0, width: 0,
@ -184,8 +184,8 @@ var config = {
modelPath: "blazeface.json", modelPath: "blazeface.json",
rotation: true, rotation: true,
maxDetected: 1, maxDetected: 1,
skipFrames: 11, skipFrames: 99,
skipTime: 2e3, skipTime: 2500,
minConfidence: 0.2, minConfidence: 0.2,
iouThreshold: 0.1, iouThreshold: 0.1,
return: false return: false
@ -201,21 +201,21 @@ var config = {
emotion: { emotion: {
enabled: true, enabled: true,
minConfidence: 0.1, minConfidence: 0.1,
skipFrames: 12, skipFrames: 99,
skipTime: 2e3, skipTime: 1500,
modelPath: "emotion.json" modelPath: "emotion.json"
}, },
description: { description: {
enabled: true, enabled: true,
modelPath: "faceres.json", modelPath: "faceres.json",
skipFrames: 13, skipFrames: 99,
skipTime: 2e3, skipTime: 3e3,
minConfidence: 0.1 minConfidence: 0.1
}, },
antispoof: { antispoof: {
enabled: false, enabled: false,
skipFrames: 14, skipFrames: 99,
skipTime: 2e3, skipTime: 4e3,
modelPath: "antispoof.json" modelPath: "antispoof.json"
} }
}, },
@ -228,12 +228,12 @@ var config = {
maxDetected: -1, maxDetected: -1,
minConfidence: 0.3, minConfidence: 0.3,
skipFrames: 1, skipFrames: 1,
skipTime: 2e3 skipTime: 200
}, },
hand: { hand: {
enabled: true, enabled: true,
rotation: true, rotation: true,
skipFrames: 2, skipFrames: 99,
skipTime: 2e3, skipTime: 2e3,
minConfidence: 0.5, minConfidence: 0.5,
iouThreshold: 0.2, iouThreshold: 0.2,
@ -252,8 +252,8 @@ var config = {
minConfidence: 0.2, minConfidence: 0.2,
iouThreshold: 0.4, iouThreshold: 0.4,
maxDetected: 10, maxDetected: 10,
skipFrames: 15, skipFrames: 99,
skipTime: 2e3 skipTime: 1e3
}, },
segmentation: { segmentation: {
enabled: false, enabled: false,
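
The new defaults above pair an effectively non-binding frame budget (`skipFrames: 99` for most modules) with a per-module time budget: 2500ms for the face detector, 1500ms for emotion, 3000ms for description, 4000ms for antispoof, 200ms for body, 2000ms for hand and 1000ms for object detection. A sketch of overriding those budgets when creating the library, using only values that appear in this diff; any other combination is equally valid:

// Illustrative user config leaning on time-based caching.
const userConfig = {
  cacheSensitivity: 0.7,
  face: {
    detector: { skipFrames: 99, skipTime: 2500 },
    emotion: { skipFrames: 99, skipTime: 1500 },
    description: { skipFrames: 99, skipTime: 3000 },
    antispoof: { skipFrames: 99, skipTime: 4000 },
  },
  body: { skipFrames: 1, skipTime: 200 },
  hand: { skipFrames: 99, skipTime: 2000 },
  object: { skipFrames: 99, skipTime: 1000 },
};
// const human = new Human(userConfig); // merged over the defaults shown above via mergeDeep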
@ -1191,6 +1191,7 @@ var Env = class {
__publicField(this, "filter"); __publicField(this, "filter");
__publicField(this, "tfjs"); __publicField(this, "tfjs");
__publicField(this, "offscreen"); __publicField(this, "offscreen");
__publicField(this, "perfadd", false);
__publicField(this, "wasm", { __publicField(this, "wasm", {
supported: void 0, supported: void 0,
backend: void 0, backend: void 0,
@ -1322,7 +1323,7 @@ var model2;
var cached = []; var cached = [];
var skipped2 = Number.MAX_SAFE_INTEGER; var skipped2 = Number.MAX_SAFE_INTEGER;
var lastCount = 0; var lastCount = 0;
var last = 0; var lastTime = 0;
async function load2(config3) { async function load2(config3) {
var _a, _b; var _a, _b;
if (env.initial) if (env.initial)
@ -1341,7 +1342,9 @@ async function predict(image25, config3, idx, count2) {
var _a, _b; var _a, _b;
if (!model2) if (!model2)
return null; return null;
if (skipped2 < (((_a = config3.face.antispoof) == null ? void 0 : _a.skipFrames) || 0) && (((_b = config3.face.antispoof) == null ? void 0 : _b.skipTime) || 0) <= now() - last && config3.skipFrame && lastCount === count2 && cached[idx]) { const skipTime = (((_a = config3.face.antispoof) == null ? void 0 : _a.skipTime) || 0) > now() - lastTime;
const skipFrame = skipped2 < (((_b = config3.face.antispoof) == null ? void 0 : _b.skipFrames) || 0);
if (config3.skipAllowed && skipTime && skipFrame && lastCount === count2 && cached[idx]) {
skipped2++; skipped2++;
return cached[idx]; return cached[idx];
} }
@ -1352,7 +1355,7 @@ async function predict(image25, config3, idx, count2) {
const num = (await res.data())[0]; const num = (await res.data())[0];
cached[idx] = Math.round(100 * num) / 100; cached[idx] = Math.round(100 * num) / 100;
lastCount = count2; lastCount = count2;
last = now(); lastTime = now();
tf4.dispose([resize, res]); tf4.dispose([resize, res]);
resolve(cached[idx]); resolve(cached[idx]);
}); });
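
For the per-face models (antispoof here, emotion and description below) the cache is additionally indexed by face slot and invalidated when the number of detected faces changes, via the `lastCount === count2` check and the `cached[idx]` array. A hedged sketch of that shape, with `runFaceModel` as a hypothetical stand-in for the real prediction:

// Per-face result cache, invalidated when the detected face count changes.
const now = () => performance.now();
const cacheByIndex = [];
let lastCount = 0;
let lastTime = 0;
let skipped = Number.MAX_SAFE_INTEGER;

async function predictForFace(tensor, config, idx, count) {
  const skipTime = (config.skipTime || 0) > now() - lastTime;
  const skipFrame = skipped < (config.skipFrames || 0);
  if (config.skipAllowed && skipTime && skipFrame && lastCount === count && cacheByIndex[idx]) {
    skipped++;
    return cacheByIndex[idx];                 // same face slot, still within both budgets
  }
  skipped = 0;
  cacheByIndex[idx] = await runFaceModel(tensor, config); // hypothetical model call
  lastCount = count;
  lastTime = now();
  return cacheByIndex[idx];
}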
@ -4937,7 +4940,7 @@ var skipped3 = Number.MAX_SAFE_INTEGER;
var outputNodes; var outputNodes;
var cache = null; var cache = null;
var padding = [[0, 0], [0, 0], [0, 0], [0, 0]]; var padding = [[0, 0], [0, 0], [0, 0], [0, 0]];
var last2 = 0; var lastTime2 = 0;
async function loadDetect(config3) { async function loadDetect(config3) {
var _a, _b, _c; var _a, _b, _c;
if (env3.initial) if (env3.initial)
@ -5054,11 +5057,13 @@ async function detectParts(input, config3, outputSize2) {
} }
async function predict2(input, config3) { async function predict2(input, config3) {
const outputSize2 = [input.shape[2] || 0, input.shape[1] || 0]; const outputSize2 = [input.shape[2] || 0, input.shape[1] || 0];
if (skipped3 < (config3.body.skipFrames || 0) && (config3.body.skipTime || 0) <= now() - last2 && config3.skipFrame && cache !== null) { const skipTime = (config3.body.skipTime || 0) > now() - lastTime2;
const skipFrame = skipped3 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && cache !== null) {
skipped3++; skipped3++;
} else { } else {
cache = await detectParts(input, config3, outputSize2); cache = await detectParts(input, config3, outputSize2);
last2 = now(); lastTime2 = now();
skipped3 = 0; skipped3 = 0;
} }
if (cache) if (cache)
@ -5156,8 +5161,8 @@ var labels = [
// src/object/centernet.ts // src/object/centernet.ts
var model4; var model4;
var inputSize3 = 0; var inputSize3 = 0;
var last3 = []; var last = [];
var lastTime = 0; var lastTime3 = 0;
var skipped4 = Number.MAX_SAFE_INTEGER; var skipped4 = Number.MAX_SAFE_INTEGER;
async function load4(config3) { async function load4(config3) {
if (env.initial) if (env.initial)
@ -5222,21 +5227,23 @@ async function process3(res, outputShape, config3) {
return results; return results;
} }
async function predict3(input, config3) { async function predict3(input, config3) {
if (skipped4 < (config3.object.skipFrames || 0) && (config3.object.skipTime || 0) <= now() - lastTime && config3.skipFrame && last3.length > 0) { const skipTime = (config3.object.skipTime || 0) > now() - lastTime3;
const skipFrame = skipped4 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last.length > 0) {
skipped4++; skipped4++;
return last3; return last;
} }
skipped4 = 0; skipped4 = 0;
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last3; return last;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [input.shape[2], input.shape[1]]; const outputSize2 = [input.shape[2], input.shape[1]];
const resize = tf8.image.resizeBilinear(input, [inputSize3, inputSize3]); const resize = tf8.image.resizeBilinear(input, [inputSize3, inputSize3]);
const objectT = config3.object.enabled ? model4 == null ? void 0 : model4.execute(resize, ["tower_0/detections"]) : null; const objectT = config3.object.enabled ? model4 == null ? void 0 : model4.execute(resize, ["tower_0/detections"]) : null;
lastTime = now(); lastTime3 = now();
tf8.dispose(resize); tf8.dispose(resize);
const obj = await process3(objectT, outputSize2, config3); const obj = await process3(objectT, outputSize2, config3);
last3 = obj; last = obj;
resolve(obj); resolve(obj);
}); });
} }
@ -5279,7 +5286,7 @@ var connected2 = {
// src/body/efficientpose.ts // src/body/efficientpose.ts
var model5; var model5;
var last4 = 0; var lastTime4 = 0;
var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} }; var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} };
var skipped5 = Number.MAX_SAFE_INTEGER; var skipped5 = Number.MAX_SAFE_INTEGER;
async function load5(config3) { async function load5(config3) {
@ -5311,14 +5318,15 @@ function max2d(inputs, minScore) {
}); });
} }
async function predict4(image25, config3) { async function predict4(image25, config3) {
var _a; const skipTime = (config3.body.skipTime || 0) > now() - lastTime4;
if (skipped5 < (((_a = config3.body) == null ? void 0 : _a.skipFrames) || 0) && config3.skipFrame && Object.keys(cache2.keypoints).length > 0 && (config3.body.skipTime || 0) <= now() - last4) { const skipFrame = skipped5 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && Object.keys(cache2.keypoints).length > 0) {
skipped5++; skipped5++;
return [cache2]; return [cache2];
} }
skipped5 = 0; skipped5 = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
var _a2; var _a;
const tensor3 = tf9.tidy(() => { const tensor3 = tf9.tidy(() => {
if (!(model5 == null ? void 0 : model5.inputs[0].shape)) if (!(model5 == null ? void 0 : model5.inputs[0].shape))
return null; return null;
@ -5330,7 +5338,7 @@ async function predict4(image25, config3) {
let resT; let resT;
if (config3.body.enabled) if (config3.body.enabled)
resT = await (model5 == null ? void 0 : model5.predict(tensor3)); resT = await (model5 == null ? void 0 : model5.predict(tensor3));
last4 = now(); lastTime4 = now();
tf9.dispose(tensor3); tf9.dispose(tensor3);
if (resT) { if (resT) {
cache2.keypoints.length = 0; cache2.keypoints.length = 0;
@ -5340,7 +5348,7 @@ async function predict4(image25, config3) {
tf9.dispose(squeeze8); tf9.dispose(squeeze8);
for (let id = 0; id < stack3.length; id++) { for (let id = 0; id < stack3.length; id++) {
const [x2, y2, partScore] = max2d(stack3[id], config3.body.minConfidence); const [x2, y2, partScore] = max2d(stack3[id], config3.body.minConfidence);
if (partScore > (((_a2 = config3.body) == null ? void 0 : _a2.minConfidence) || 0)) { if (partScore > (((_a = config3.body) == null ? void 0 : _a.minConfidence) || 0)) {
cache2.keypoints.push({ cache2.keypoints.push({
score: Math.round(100 * partScore) / 100, score: Math.round(100 * partScore) / 100,
part: kpt2[id], part: kpt2[id],
@ -5392,9 +5400,9 @@ async function predict4(image25, config3) {
var tf10 = __toModule(require_tfjs_esm()); var tf10 = __toModule(require_tfjs_esm());
var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"]; var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"];
var model6; var model6;
var last5 = []; var last2 = [];
var lastCount2 = 0; var lastCount2 = 0;
var lastTime2 = 0; var lastTime5 = 0;
var skipped6 = Number.MAX_SAFE_INTEGER; var skipped6 = Number.MAX_SAFE_INTEGER;
var rgb = [0.2989, 0.587, 0.114]; var rgb = [0.2989, 0.587, 0.114];
async function load6(config3) { async function load6(config3) {
@ -5415,9 +5423,11 @@ async function predict5(image25, config3, idx, count2) {
var _a, _b; var _a, _b;
if (!model6) if (!model6)
return null; return null;
if (skipped6 < (((_a = config3.face.emotion) == null ? void 0 : _a.skipFrames) || 0) && (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) <= now() - lastTime2 && config3.skipFrame && lastCount2 === count2 && last5[idx] && last5[idx].length > 0) { const skipFrame = skipped6 < (((_a = config3.face.emotion) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime5;
if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last2[idx] && last2[idx].length > 0) {
skipped6++; skipped6++;
return last5[idx]; return last2[idx];
} }
skipped6 = 0; skipped6 = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
@ -5440,7 +5450,7 @@ async function predict5(image25, config3, idx, count2) {
const normalize = tf10.tidy(() => tf10.mul(tf10.sub(grayscale, 0.5), 2)); const normalize = tf10.tidy(() => tf10.mul(tf10.sub(grayscale, 0.5), 2));
tf10.dispose(grayscale); tf10.dispose(grayscale);
const emotionT = await (model6 == null ? void 0 : model6.predict(normalize)); const emotionT = await (model6 == null ? void 0 : model6.predict(normalize));
lastTime2 = now(); lastTime5 = now();
const data = await emotionT.data(); const data = await emotionT.data();
tf10.dispose(emotionT); tf10.dispose(emotionT);
for (let i = 0; i < data.length; i++) { for (let i = 0; i < data.length; i++) {
@ -5450,7 +5460,7 @@ async function predict5(image25, config3, idx, count2) {
obj.sort((a, b) => b.score - a.score); obj.sort((a, b) => b.score - a.score);
tf10.dispose(normalize); tf10.dispose(normalize);
} }
last5[idx] = obj; last2[idx] = obj;
lastCount2 = count2; lastCount2 = count2;
resolve(obj); resolve(obj);
}); });
@ -5597,13 +5607,15 @@ var boxCache = [];
var model8 = null; var model8 = null;
var inputSize5 = 0; var inputSize5 = 0;
var skipped7 = Number.MAX_SAFE_INTEGER; var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime3 = 0; var lastTime6 = 0;
var detectedFaces = 0; var detectedFaces = 0;
async function predict6(input, config3) { async function predict6(input, config3) {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
if (!config3.skipFrame || (detectedFaces !== ((_a = config3.face.detector) == null ? void 0 : _a.maxDetected) || !((_b = config3.face.mesh) == null ? void 0 : _b.enabled)) && (skipped7 > (((_c = config3.face.detector) == null ? void 0 : _c.skipFrames) || 0) && (((_d = config3.face.description) == null ? void 0 : _d.skipTime) || 0) <= now() - lastTime3)) { const skipTime = (((_a = config3.face.detector) == null ? void 0 : _a.skipTime) || 0) > now() - lastTime6;
const skipFrame = skipped7 < (((_b = config3.face.detector) == null ? void 0 : _b.skipFrames) || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || detectedFaces === 0) {
const newBoxes2 = await getBoxes(input, config3); const newBoxes2 = await getBoxes(input, config3);
lastTime3 = now(); lastTime6 = now();
boxCache = []; boxCache = [];
for (const possible of newBoxes2.boxes) { for (const possible of newBoxes2.boxes) {
const startPoint = await possible.box.startPoint.data(); const startPoint = await possible.box.startPoint.data();
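
The face pipeline inverts the gate used by the other modules: rather than returning a cached result early, it re-runs the box detector whenever caching is not allowed, either budget is exhausted, or no faces were found previously, and otherwise reuses `boxCache` to feed the mesh stage directly. The condition above is simply the negation of the usual test, shown here in its De Morgan form:

// Equivalent formulation of the detector re-run condition above.
const canReuseCachedBoxes = config.skipAllowed && skipTime && skipFrame && detectedFaces > 0;
if (!canReuseCachedBoxes) {
  // run the blazeface detector and rebuild boxCache
}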
@ -5639,16 +5651,16 @@ async function predict6(input, config3) {
faceScore: 0, faceScore: 0,
annotations: {} annotations: {}
}; };
if (((_e = config3.face.detector) == null ? void 0 : _e.rotation) && ((_f = config3.face.mesh) == null ? void 0 : _f.enabled) && env.kernels.includes("rotatewithoffset")) { if (((_c = config3.face.detector) == null ? void 0 : _c.rotation) && ((_d = config3.face.mesh) == null ? void 0 : _d.enabled) && env.kernels.includes("rotatewithoffset")) {
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} else { } else {
rotationMatrix = IDENTITY_MATRIX; rotationMatrix = IDENTITY_MATRIX;
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input, ((_g = config3.face.mesh) == null ? void 0 : _g.enabled) ? [inputSize5, inputSize5] : [size(), size()]); const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input, ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) ? [inputSize5, inputSize5] : [size(), size()]);
face5.tensor = tf12.div(cut, 255); face5.tensor = tf12.div(cut, 255);
tf12.dispose(cut); tf12.dispose(cut);
} }
face5.boxScore = Math.round(100 * box4.confidence) / 100; face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!((_h = config3.face.mesh) == null ? void 0 : _h.enabled)) { if (!((_f = config3.face.mesh) == null ? void 0 : _f.enabled)) {
face5.box = getClampedBox(box4, input); face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input); face5.boxRaw = getRawBox(box4, input);
face5.score = Math.round(100 * box4.confidence || 0) / 100; face5.score = Math.round(100 * box4.confidence || 0) / 100;
@ -5671,17 +5683,17 @@ async function predict6(input, config3) {
let rawCoords = await coordsReshaped.array(); let rawCoords = await coordsReshaped.array();
tf12.dispose(contourCoords); tf12.dispose(contourCoords);
tf12.dispose(coordsReshaped); tf12.dispose(coordsReshaped);
if (faceConfidence < (((_i = config3.face.detector) == null ? void 0 : _i.minConfidence) || 1)) { if (faceConfidence < (((_g = config3.face.detector) == null ? void 0 : _g.minConfidence) || 1)) {
box4.confidence = faceConfidence; box4.confidence = faceConfidence;
} else { } else {
if ((_j = config3.face.iris) == null ? void 0 : _j.enabled) if ((_h = config3.face.iris) == null ? void 0 : _h.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5); rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5); face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]); face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence }; box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence };
for (const key of Object.keys(meshAnnotations)) for (const key of Object.keys(meshAnnotations))
face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]); face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]);
if (((_k = config3.face.detector) == null ? void 0 : _k.rotation) && config3.face.mesh.enabled && ((_l = config3.face.description) == null ? void 0 : _l.enabled) && env.kernels.includes("rotatewithoffset")) { if (((_i = config3.face.detector) == null ? void 0 : _i.rotation) && config3.face.mesh.enabled && ((_j = config3.face.description) == null ? void 0 : _j.enabled) && env.kernels.includes("rotatewithoffset")) {
tf12.dispose(face5.tensor); tf12.dispose(face5.tensor);
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} }
@ -5695,7 +5707,7 @@ async function predict6(input, config3) {
faces.push(face5); faces.push(face5);
newBoxes.push(box4); newBoxes.push(box4);
} }
if ((_m = config3.face.mesh) == null ? void 0 : _m.enabled) if ((_k = config3.face.mesh) == null ? void 0 : _k.enabled)
boxCache = newBoxes.filter((a) => { boxCache = newBoxes.filter((a) => {
var _a2; var _a2;
return a.confidence > (((_a2 = config3.face.detector) == null ? void 0 : _a2.minConfidence) || 0); return a.confidence > (((_a2 = config3.face.detector) == null ? void 0 : _a2.minConfidence) || 0);
@ -5726,8 +5738,8 @@ var uvmap = UV468;
// src/face/faceres.ts // src/face/faceres.ts
var tf13 = __toModule(require_tfjs_esm()); var tf13 = __toModule(require_tfjs_esm());
var model9; var model9;
var last6 = []; var last3 = [];
var lastTime4 = 0; var lastTime7 = 0;
var lastCount3 = 0; var lastCount3 = 0;
var skipped8 = Number.MAX_SAFE_INTEGER; var skipped8 = Number.MAX_SAFE_INTEGER;
async function load9(config3) { async function load9(config3) {
@ -5763,9 +5775,11 @@ async function predict7(image25, config3, idx, count2) {
var _a, _b, _c, _d; var _a, _b, _c, _d;
if (!model9) if (!model9)
return null; return null;
if (skipped8 < (((_a = config3.face.description) == null ? void 0 : _a.skipFrames) || 0) && (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) <= now() - lastTime4 && config3.skipFrame && lastCount3 === count2 && ((_c = last6[idx]) == null ? void 0 : _c.age) && ((_d = last6[idx]) == null ? void 0 : _d.age) > 0) { const skipFrame = skipped8 < (((_a = config3.face.description) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime7;
if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c = last3[idx]) == null ? void 0 : _c.age) && ((_d = last3[idx]) == null ? void 0 : _d.age) > 0) {
skipped8++; skipped8++;
return last6[idx]; return last3[idx];
} }
skipped8 = 0; skipped8 = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
@ -5779,7 +5793,7 @@ async function predict7(image25, config3, idx, count2) {
if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) { if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) {
const enhanced = enhance(image25); const enhanced = enhance(image25);
const resT = await (model9 == null ? void 0 : model9.predict(enhanced)); const resT = await (model9 == null ? void 0 : model9.predict(enhanced));
lastTime4 = now(); lastTime7 = now();
tf13.dispose(enhanced); tf13.dispose(enhanced);
const genderT = await resT.find((t) => t.shape[1] === 1); const genderT = await resT.find((t) => t.shape[1] === 1);
const gender = await genderT.data(); const gender = await genderT.data();
@ -5799,7 +5813,7 @@ async function predict7(image25, config3, idx, count2) {
obj.descriptor = Array.from(descriptor); obj.descriptor = Array.from(descriptor);
resT.forEach((t) => tf13.dispose(t)); resT.forEach((t) => tf13.dispose(t));
} }
last6[idx] = obj; last3[idx] = obj;
lastCount3 = count2; lastCount3 = count2;
resolve(obj); resolve(obj);
}); });
@ -8954,7 +8968,7 @@ var handBoxEnlargeFactor = 1.65;
var palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2]; var palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2];
var palmLandmarksPalmBase = 0; var palmLandmarksPalmBase = 0;
var palmLandmarksMiddleFingerBase = 2; var palmLandmarksMiddleFingerBase = 2;
var lastTime5 = 0; var lastTime8 = 0;
var HandPipeline = class { var HandPipeline = class {
constructor(handDetector, handPoseModel2) { constructor(handDetector, handPoseModel2) {
__publicField(this, "handDetector"); __publicField(this, "handDetector");
@ -8967,7 +8981,7 @@ var HandPipeline = class {
this.handPoseModel = handPoseModel2; this.handPoseModel = handPoseModel2;
this.inputSize = this.handPoseModel && this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0; this.inputSize = this.handPoseModel && this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0;
this.storedBoxes = []; this.storedBoxes = [];
this.skipped = 0; this.skipped = Number.MAX_SAFE_INTEGER;
this.detectedHands = 0; this.detectedHands = 0;
} }
calculateLandmarksBoundingBox(landmarks) { calculateLandmarksBoundingBox(landmarks) {
@ -9019,11 +9033,13 @@ var HandPipeline = class {
async estimateHands(image25, config3) { async estimateHands(image25, config3) {
let useFreshBox = false; let useFreshBox = false;
let boxes; let boxes;
if (this.skipped === 0 || this.skipped > config3.hand.skipFrames && (config3.hand.skipTime || 0) <= now() - lastTime5 || !config3.hand.landmarks || !config3.skipFrame) { const skipTime = (config3.hand.skipTime || 0) > now() - lastTime8;
const skipFrame = this.skipped < (config3.hand.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
boxes = await this.handDetector.estimateHandBounds(image25, config3); boxes = await this.handDetector.estimateHandBounds(image25, config3);
this.skipped = 0; this.skipped = 0;
} }
if (config3.skipFrame) if (config3.skipAllowed)
this.skipped++; this.skipped++;
if (boxes && boxes.length > 0 && (boxes.length !== this.detectedHands && this.detectedHands !== config3.hand.maxDetected || !config3.hand.landmarks)) { if (boxes && boxes.length > 0 && (boxes.length !== this.detectedHands && this.detectedHands !== config3.hand.maxDetected || !config3.hand.landmarks)) {
this.detectedHands = 0; this.detectedHands = 0;
@ -9048,7 +9064,7 @@ var HandPipeline = class {
tf16.dispose(croppedInput); tf16.dispose(croppedInput);
tf16.dispose(rotatedImage); tf16.dispose(rotatedImage);
const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage); const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage);
lastTime5 = now(); lastTime8 = now();
tf16.dispose(handImage); tf16.dispose(handImage);
const confidence = (await confidenceT.data())[0]; const confidence = (await confidenceT.data())[0];
tf16.dispose(confidenceT); tf16.dispose(confidenceT);
@ -9617,8 +9633,8 @@ var faceIndex = 4;
var boxExpandFact = 1.6; var boxExpandFact = 1.6;
var maxDetectorResolution = 512; var maxDetectorResolution = 512;
var detectorExpandFact = 1.4; var detectorExpandFact = 1.4;
var skipped9 = 0; var skipped9 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0; var lastTime9 = 0;
var outputSize = [0, 0]; var outputSize = [0, 0];
var cache3 = { var cache3 = {
boxes: [], boxes: [],
@ -9758,17 +9774,21 @@ async function predict9(input, config3) {
return []; return [];
outputSize = [input.shape[2] || 0, input.shape[1] || 0]; outputSize = [input.shape[2] || 0, input.shape[1] || 0];
skipped9++; skipped9++;
if (config3.skipFrame && skipped9 <= (config3.hand.skipFrames || 0) && (config3.hand.skipTime || 0) <= now() - lastTime6) { const skipTime = (config3.hand.skipTime || 0) > now() - lastTime9;
const skipFrame = skipped9 < (config3.hand.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
return cache3.hands; return cache3.hands;
} }
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
if (config3.skipFrame && cache3.hands.length === config3.hand.maxDetected) { const skipTimeExtended = 3 * (config3.hand.skipTime || 0) > now() - lastTime9;
const skipFrameExtended = skipped9 < 3 * (config3.hand.skipFrames || 0);
if (config3.skipAllowed && cache3.hands.length === config3.hand.maxDetected) {
cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3))); cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
} else if (config3.skipFrame && skipped9 < 3 * (config3.hand.skipFrames || 0) && (config3.hand.skipTime || 0) <= 3 * (now() - lastTime6) && cache3.hands.length > 0) { } else if (config3.skipAllowed && skipTimeExtended && skipFrameExtended && cache3.hands.length > 0) {
cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3))); cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
} else { } else {
cache3.boxes = await detectHands(input, config3); cache3.boxes = await detectHands(input, config3);
lastTime6 = now(); lastTime9 = now();
cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3))); cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
skipped9 = 0; skipped9 = 0;
} }
@ -10067,10 +10087,12 @@ async function parseMultiPose(res, config3, image25, inputBox) {
async function predict10(input, config3) { async function predict10(input, config3) {
if (!model10 || !(model10 == null ? void 0 : model10.inputs[0].shape)) if (!model10 || !(model10 == null ? void 0 : model10.inputs[0].shape))
return []; return [];
if (!config3.skipFrame) if (!config3.skipAllowed)
cache5.boxes.length = 0; cache5.boxes.length = 0;
skipped10++; skipped10++;
if (config3.skipFrame && (skipped10 <= (config3.body.skipFrames || 0) && (config3.body.skipTime || 0) <= now() - cache5.last)) { const skipTime = (config3.body.skipTime || 0) > now() - cache5.last;
const skipFrame = skipped10 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
return cache5.bodies; return cache5.bodies;
} }
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
@ -10093,8 +10115,8 @@ async function predict10(input, config3) {
// src/object/nanodet.ts // src/object/nanodet.ts
var tf21 = __toModule(require_tfjs_esm()); var tf21 = __toModule(require_tfjs_esm());
var model11; var model11;
var last7 = []; var last4 = [];
var lastTime7 = 0; var lastTime10 = 0;
var skipped11 = Number.MAX_SAFE_INTEGER; var skipped11 = Number.MAX_SAFE_INTEGER;
var scaleBox = 2.5; var scaleBox = 2.5;
async function load12(config3) { async function load12(config3) {
@ -10174,13 +10196,15 @@ async function process4(res, inputSize8, outputShape, config3) {
return results; return results;
} }
async function predict11(image25, config3) { async function predict11(image25, config3) {
if (skipped11 < (config3.object.skipFrames || 0) && (config3.object.skipTime || 0) <= now() - lastTime7 && config3.skipFrame && last7.length > 0) { const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
const skipFrame = skipped11 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last4.length > 0) {
skipped11++; skipped11++;
return last7; return last4;
} }
skipped11 = 0; skipped11 = 0;
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last7; return last4;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [image25.shape[2], image25.shape[1]]; const outputSize2 = [image25.shape[2], image25.shape[1]];
const resize = tf21.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false); const resize = tf21.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false);
@ -10191,10 +10215,10 @@ async function predict11(image25, config3) {
let objectT; let objectT;
if (config3.object.enabled) if (config3.object.enabled)
objectT = await model11.predict(transpose); objectT = await model11.predict(transpose);
lastTime7 = now(); lastTime10 = now();
tf21.dispose(transpose); tf21.dispose(transpose);
const obj = await process4(objectT, model11.inputSize, outputSize2, config3); const obj = await process4(objectT, model11.inputSize, outputSize2, config3);
last7 = obj; last4 = obj;
resolve(obj); resolve(obj);
}); });
} }
@ -11394,7 +11418,7 @@ async function canvas2(input, output) {
async function all(inCanvas2, result, drawOptions) { async function all(inCanvas2, result, drawOptions) {
if (!result || !result.performance || !result || !inCanvas2) if (!result || !result.performance || !result || !inCanvas2)
return null; return null;
const timestamp = now(); const timeStamp = now();
const localOptions = mergeDeep(options2, drawOptions); const localOptions = mergeDeep(options2, drawOptions);
const promise = Promise.all([ const promise = Promise.all([
face(inCanvas2, result.face, localOptions), face(inCanvas2, result.face, localOptions),
@ -11403,7 +11427,7 @@ async function all(inCanvas2, result, drawOptions) {
object(inCanvas2, result.object, localOptions), object(inCanvas2, result.object, localOptions),
gesture(inCanvas2, result.gesture, localOptions) gesture(inCanvas2, result.gesture, localOptions)
]); ]);
result.performance.draw = Math.trunc(now() - timestamp); result.performance.draw = env.perfadd ? (result.performance.draw || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
return promise; return promise;
} }
@ -11531,7 +11555,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:face"; parent.state = "run:face";
timeStamp = now(); timeStamp = now();
const faces = await predict6(input, parent.config); const faces = await predict6(input, parent.config);
parent.performance.face = Math.trunc(now() - timeStamp); parent.performance.face = env.perfadd ? (parent.performance.face || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (!input.shape || input.shape.length !== 4) if (!input.shape || input.shape.length !== 4)
return []; return [];
if (!faces) if (!faces)
@ -11550,7 +11574,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:emotion"; parent.state = "run:emotion";
timeStamp = now(); timeStamp = now();
emotionRes = parent.config.face.emotion.enabled ? await predict5(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null; emotionRes = parent.config.face.emotion.enabled ? await predict5(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null;
parent.performance.emotion = Math.trunc(now() - timeStamp); parent.performance.emotion = env.perfadd ? (parent.performance.emotion || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End Emotion:"); parent.analyze("End Emotion:");
parent.analyze("Start AntiSpoof:"); parent.analyze("Start AntiSpoof:");
@ -11560,7 +11584,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:antispoof"; parent.state = "run:antispoof";
timeStamp = now(); timeStamp = now();
antispoofRes = parent.config.face.antispoof.enabled ? await predict(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null; antispoofRes = parent.config.face.antispoof.enabled ? await predict(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null;
parent.performance.antispoof = Math.trunc(now() - timeStamp); parent.performance.antispoof = env.perfadd ? (parent.performance.antispoof || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End AntiSpoof:"); parent.analyze("End AntiSpoof:");
parent.analyze("Start Description:"); parent.analyze("Start Description:");
@ -11570,7 +11594,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:description"; parent.state = "run:description";
timeStamp = now(); timeStamp = now();
descRes = parent.config.face.description.enabled ? await predict7(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null; descRes = parent.config.face.description.enabled ? await predict7(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null;
parent.performance.embedding = Math.trunc(now() - timeStamp); parent.performance.embedding = env.perfadd ? (parent.performance.embedding || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End Description:"); parent.analyze("End Description:");
if (parent.config.async) { if (parent.config.async) {
@ -12889,7 +12913,7 @@ var Human = class {
const count2 = Object.values(this.models).filter((model14) => model14).length; const count2 = Object.values(this.models).filter((model14) => model14).length;
if (userConfig) if (userConfig)
this.config = mergeDeep(this.config, userConfig); this.config = mergeDeep(this.config, userConfig);
if (env.initial) { if (this.env.initial) {
if (this.config.debug) if (this.config.debug)
log(`version: ${this.version}`); log(`version: ${this.version}`);
if (this.config.debug) if (this.config.debug)
@ -12905,9 +12929,9 @@ var Human = class {
} }
} }
await load15(this); await load15(this);
if (env.initial && this.config.debug) if (this.env.initial && this.config.debug)
log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors"); log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors");
env.initial = false; this.env.initial = false;
const loaded = Object.values(this.models).filter((model14) => model14).length; const loaded = Object.values(this.models).filter((model14) => model14).length;
if (loaded !== count2) { if (loaded !== count2) {
await validate2(this); await validate2(this);
@ -12915,7 +12939,7 @@ var Human = class {
} }
const current = Math.trunc(now() - timeStamp); const current = Math.trunc(now() - timeStamp);
if (current > (this.performance.load || 0)) if (current > (this.performance.load || 0))
this.performance.load = current; this.performance.load = this.env.perfadd ? (this.performance.load || 0) + current : current;
} }
next(result = this.result) { next(result = this.result) {
return calc2(result, this.config); return calc2(result, this.config);
@ -12929,7 +12953,6 @@ var Human = class {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
this.state = "config"; this.state = "config";
let timeStamp; let timeStamp;
let elapsedTime;
this.config = mergeDeep(this.config, userConfig); this.config = mergeDeep(this.config, userConfig);
this.state = "check"; this.state = "check";
const error = __privateGet(this, _sanity).call(this, input); const error = __privateGet(this, _sanity).call(this, input);
@ -12944,7 +12967,7 @@ var Human = class {
this.state = "image"; this.state = "image";
const img = process2(input, this.config); const img = process2(input, this.config);
this.process = img; this.process = img;
this.performance.image = Math.trunc(now() - timeStamp); this.performance.image = this.env.perfadd ? (this.performance.image || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Get Image:"); this.analyze("Get Image:");
if (!img.tensor) { if (!img.tensor) {
if (this.config.debug) if (this.config.debug)
@ -12954,15 +12977,15 @@ var Human = class {
} }
this.emit("image"); this.emit("image");
timeStamp = now(); timeStamp = now();
this.config.skipFrame = await skip(this.config, img.tensor); this.config.skipAllowed = await skip(this.config, img.tensor);
if (!this.performance.frames) if (!this.performance.frames)
this.performance.frames = 0; this.performance.frames = 0;
if (!this.performance.cached) if (!this.performance.cached)
this.performance.cached = 0; this.performance.cached = 0;
this.performance.frames++; this.performance.frames++;
if (this.config.skipFrame) if (this.config.skipAllowed)
this.performance.cached++; this.performance.cached++;
this.performance.changed = Math.trunc(now() - timeStamp); this.performance.changed = this.env.perfadd ? (this.performance.changed || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Check Changed:"); this.analyze("Check Changed:");
let faceRes = []; let faceRes = [];
let bodyRes = []; let bodyRes = [];
@ -12976,9 +12999,7 @@ var Human = class {
} else { } else {
timeStamp = now(); timeStamp = now();
faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.face = this.env.perfadd ? (this.performance.face || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.face = elapsedTime;
} }
if (this.config.async && (this.config.body.maxDetected === -1 || this.config.hand.maxDetected === -1)) if (this.config.async && (this.config.body.maxDetected === -1 || this.config.hand.maxDetected === -1))
faceRes = await faceRes; faceRes = await faceRes;
@ -13006,9 +13027,7 @@ var Human = class {
bodyRes = this.config.body.enabled ? await predict4(img.tensor, bodyConfig) : []; bodyRes = this.config.body.enabled ? await predict4(img.tensor, bodyConfig) : [];
else if ((_h = this.config.body.modelPath) == null ? void 0 : _h.includes("movenet")) else if ((_h = this.config.body.modelPath) == null ? void 0 : _h.includes("movenet"))
bodyRes = this.config.body.enabled ? await predict10(img.tensor, bodyConfig) : []; bodyRes = this.config.body.enabled ? await predict10(img.tensor, bodyConfig) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.body = this.env.perfadd ? (this.performance.body || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.body = elapsedTime;
} }
this.analyze("End Body:"); this.analyze("End Body:");
this.analyze("Start Hand:"); this.analyze("Start Hand:");
@ -13027,9 +13046,7 @@ var Human = class {
handRes = this.config.hand.enabled ? await predict8(img.tensor, handConfig) : []; handRes = this.config.hand.enabled ? await predict8(img.tensor, handConfig) : [];
else if ((_p = (_o = this.config.hand.detector) == null ? void 0 : _o.modelPath) == null ? void 0 : _p.includes("handtrack")) else if ((_p = (_o = this.config.hand.detector) == null ? void 0 : _o.modelPath) == null ? void 0 : _p.includes("handtrack"))
handRes = this.config.hand.enabled ? await predict9(img.tensor, handConfig) : []; handRes = this.config.hand.enabled ? await predict9(img.tensor, handConfig) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.hand = this.env.perfadd ? (this.performance.hand || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.hand = elapsedTime;
} }
this.analyze("End Hand:"); this.analyze("End Hand:");
this.analyze("Start Object:"); this.analyze("Start Object:");
@ -13047,9 +13064,7 @@ var Human = class {
objectRes = this.config.object.enabled ? await predict11(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? await predict11(img.tensor, this.config) : [];
else if ((_t = this.config.object.modelPath) == null ? void 0 : _t.includes("centernet")) else if ((_t = this.config.object.modelPath) == null ? void 0 : _t.includes("centernet"))
objectRes = this.config.object.enabled ? await predict3(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? await predict3(img.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.object = this.env.perfadd ? (this.performance.object || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.object = elapsedTime;
} }
this.analyze("End Object:"); this.analyze("End Object:");
this.state = "detect:await"; this.state = "detect:await";
@ -13061,7 +13076,7 @@ var Human = class {
timeStamp = now(); timeStamp = now();
gestureRes = [...face2(faceRes), ...body2(bodyRes), ...hand2(handRes), ...iris3(faceRes)]; gestureRes = [...face2(faceRes), ...body2(bodyRes), ...hand2(handRes), ...iris3(faceRes)];
if (!this.config.async) if (!this.config.async)
this.performance.gesture = Math.trunc(now() - timeStamp); this.performance.gesture = this.env.perfadd ? (this.performance.gesture || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
else if (this.performance.gesture) else if (this.performance.gesture)
delete this.performance.gesture; delete this.performance.gesture;
} }
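The bundled changes above replace the old `elapsedTime` bookkeeping with a single expression that either overwrites or accumulates each timing, depending on the new `env.perfadd` flag. The helper below is a minimal sketch of that pattern for illustration only; the bundles inline the expression at every call site, and `recordTime` is not part of the library.

```ts
// Sketch of the perfadd timing pattern (illustrative helper, not library API).
// When perfadd is true, elapsed time accumulates across calls instead of
// replacing the previous value, which is useful for summing cost over a run.
function recordTime(perf: Record<string, number>, key: string, start: number, perfadd: boolean): void {
  const elapsed = Math.trunc(performance.now() - start); // same truncation as the bundle
  perf[key] = perfadd ? (perf[key] || 0) + elapsed : elapsed;
}

// usage sketch
const perf: Record<string, number> = {};
const start = performance.now();
// ... run body / hand / object inference here ...
recordTime(perf, 'body', start, true); // accumulate, as with env.perfadd = true
```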

dist/human.node.js

@ -152,8 +152,8 @@ var config = {
debug: true, debug: true,
async: true, async: true,
warmup: "full", warmup: "full",
cacheSensitivity: 0.75, cacheSensitivity: 0.7,
skipFrame: false, skipAllowed: false,
filter: { filter: {
enabled: true, enabled: true,
width: 0, width: 0,
@ -183,8 +183,8 @@ var config = {
modelPath: "blazeface.json", modelPath: "blazeface.json",
rotation: true, rotation: true,
maxDetected: 1, maxDetected: 1,
skipFrames: 11, skipFrames: 99,
skipTime: 2e3, skipTime: 2500,
minConfidence: 0.2, minConfidence: 0.2,
iouThreshold: 0.1, iouThreshold: 0.1,
return: false return: false
@ -200,21 +200,21 @@ var config = {
emotion: { emotion: {
enabled: true, enabled: true,
minConfidence: 0.1, minConfidence: 0.1,
skipFrames: 12, skipFrames: 99,
skipTime: 2e3, skipTime: 1500,
modelPath: "emotion.json" modelPath: "emotion.json"
}, },
description: { description: {
enabled: true, enabled: true,
modelPath: "faceres.json", modelPath: "faceres.json",
skipFrames: 13, skipFrames: 99,
skipTime: 2e3, skipTime: 3e3,
minConfidence: 0.1 minConfidence: 0.1
}, },
antispoof: { antispoof: {
enabled: false, enabled: false,
skipFrames: 14, skipFrames: 99,
skipTime: 2e3, skipTime: 4e3,
modelPath: "antispoof.json" modelPath: "antispoof.json"
} }
}, },
@ -227,12 +227,12 @@ var config = {
maxDetected: -1, maxDetected: -1,
minConfidence: 0.3, minConfidence: 0.3,
skipFrames: 1, skipFrames: 1,
skipTime: 2e3 skipTime: 200
}, },
hand: { hand: {
enabled: true, enabled: true,
rotation: true, rotation: true,
skipFrames: 2, skipFrames: 99,
skipTime: 2e3, skipTime: 2e3,
minConfidence: 0.5, minConfidence: 0.5,
iouThreshold: 0.2, iouThreshold: 0.2,
@ -251,8 +251,8 @@ var config = {
minConfidence: 0.2, minConfidence: 0.2,
iouThreshold: 0.4, iouThreshold: 0.4,
maxDetected: 10, maxDetected: 10,
skipFrames: 15, skipFrames: 99,
skipTime: 2e3 skipTime: 1e3
}, },
segmentation: { segmentation: {
enabled: false, enabled: false,
@ -1190,6 +1190,7 @@ var Env = class {
__publicField(this, "filter"); __publicField(this, "filter");
__publicField(this, "tfjs"); __publicField(this, "tfjs");
__publicField(this, "offscreen"); __publicField(this, "offscreen");
__publicField(this, "perfadd", false);
__publicField(this, "wasm", { __publicField(this, "wasm", {
supported: void 0, supported: void 0,
backend: void 0, backend: void 0,
@ -1321,7 +1322,7 @@ var model2;
var cached = []; var cached = [];
var skipped2 = Number.MAX_SAFE_INTEGER; var skipped2 = Number.MAX_SAFE_INTEGER;
var lastCount = 0; var lastCount = 0;
var last = 0; var lastTime = 0;
async function load2(config3) { async function load2(config3) {
var _a, _b; var _a, _b;
if (env.initial) if (env.initial)
@ -1340,7 +1341,9 @@ async function predict(image25, config3, idx, count2) {
var _a, _b; var _a, _b;
if (!model2) if (!model2)
return null; return null;
if (skipped2 < (((_a = config3.face.antispoof) == null ? void 0 : _a.skipFrames) || 0) && (((_b = config3.face.antispoof) == null ? void 0 : _b.skipTime) || 0) <= now() - last && config3.skipFrame && lastCount === count2 && cached[idx]) { const skipTime = (((_a = config3.face.antispoof) == null ? void 0 : _a.skipTime) || 0) > now() - lastTime;
const skipFrame = skipped2 < (((_b = config3.face.antispoof) == null ? void 0 : _b.skipFrames) || 0);
if (config3.skipAllowed && skipTime && skipFrame && lastCount === count2 && cached[idx]) {
skipped2++; skipped2++;
return cached[idx]; return cached[idx];
} }
@ -1351,7 +1354,7 @@ async function predict(image25, config3, idx, count2) {
const num = (await res.data())[0]; const num = (await res.data())[0];
cached[idx] = Math.round(100 * num) / 100; cached[idx] = Math.round(100 * num) / 100;
lastCount = count2; lastCount = count2;
last = now(); lastTime = now();
tf4.dispose([resize, res]); tf4.dispose([resize, res]);
resolve(cached[idx]); resolve(cached[idx]);
}); });
@ -4936,7 +4939,7 @@ var skipped3 = Number.MAX_SAFE_INTEGER;
var outputNodes; var outputNodes;
var cache = null; var cache = null;
var padding = [[0, 0], [0, 0], [0, 0], [0, 0]]; var padding = [[0, 0], [0, 0], [0, 0], [0, 0]];
var last2 = 0; var lastTime2 = 0;
async function loadDetect(config3) { async function loadDetect(config3) {
var _a, _b, _c; var _a, _b, _c;
if (env3.initial) if (env3.initial)
@ -5053,11 +5056,13 @@ async function detectParts(input, config3, outputSize2) {
} }
async function predict2(input, config3) { async function predict2(input, config3) {
const outputSize2 = [input.shape[2] || 0, input.shape[1] || 0]; const outputSize2 = [input.shape[2] || 0, input.shape[1] || 0];
if (skipped3 < (config3.body.skipFrames || 0) && (config3.body.skipTime || 0) <= now() - last2 && config3.skipFrame && cache !== null) { const skipTime = (config3.body.skipTime || 0) > now() - lastTime2;
const skipFrame = skipped3 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && cache !== null) {
skipped3++; skipped3++;
} else { } else {
cache = await detectParts(input, config3, outputSize2); cache = await detectParts(input, config3, outputSize2);
last2 = now(); lastTime2 = now();
skipped3 = 0; skipped3 = 0;
} }
if (cache) if (cache)
@ -5155,8 +5160,8 @@ var labels = [
// src/object/centernet.ts // src/object/centernet.ts
var model4; var model4;
var inputSize3 = 0; var inputSize3 = 0;
var last3 = []; var last = [];
var lastTime = 0; var lastTime3 = 0;
var skipped4 = Number.MAX_SAFE_INTEGER; var skipped4 = Number.MAX_SAFE_INTEGER;
async function load4(config3) { async function load4(config3) {
if (env.initial) if (env.initial)
@ -5221,21 +5226,23 @@ async function process3(res, outputShape, config3) {
return results; return results;
} }
async function predict3(input, config3) { async function predict3(input, config3) {
if (skipped4 < (config3.object.skipFrames || 0) && (config3.object.skipTime || 0) <= now() - lastTime && config3.skipFrame && last3.length > 0) { const skipTime = (config3.object.skipTime || 0) > now() - lastTime3;
const skipFrame = skipped4 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last.length > 0) {
skipped4++; skipped4++;
return last3; return last;
} }
skipped4 = 0; skipped4 = 0;
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last3; return last;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [input.shape[2], input.shape[1]]; const outputSize2 = [input.shape[2], input.shape[1]];
const resize = tf8.image.resizeBilinear(input, [inputSize3, inputSize3]); const resize = tf8.image.resizeBilinear(input, [inputSize3, inputSize3]);
const objectT = config3.object.enabled ? model4 == null ? void 0 : model4.execute(resize, ["tower_0/detections"]) : null; const objectT = config3.object.enabled ? model4 == null ? void 0 : model4.execute(resize, ["tower_0/detections"]) : null;
lastTime = now(); lastTime3 = now();
tf8.dispose(resize); tf8.dispose(resize);
const obj = await process3(objectT, outputSize2, config3); const obj = await process3(objectT, outputSize2, config3);
last3 = obj; last = obj;
resolve(obj); resolve(obj);
}); });
} }
@ -5278,7 +5285,7 @@ var connected2 = {
// src/body/efficientpose.ts // src/body/efficientpose.ts
var model5; var model5;
var last4 = 0; var lastTime4 = 0;
var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} }; var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} };
var skipped5 = Number.MAX_SAFE_INTEGER; var skipped5 = Number.MAX_SAFE_INTEGER;
async function load5(config3) { async function load5(config3) {
@ -5310,14 +5317,15 @@ function max2d(inputs, minScore) {
}); });
} }
async function predict4(image25, config3) { async function predict4(image25, config3) {
var _a; const skipTime = (config3.body.skipTime || 0) > now() - lastTime4;
if (skipped5 < (((_a = config3.body) == null ? void 0 : _a.skipFrames) || 0) && config3.skipFrame && Object.keys(cache2.keypoints).length > 0 && (config3.body.skipTime || 0) <= now() - last4) { const skipFrame = skipped5 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && Object.keys(cache2.keypoints).length > 0) {
skipped5++; skipped5++;
return [cache2]; return [cache2];
} }
skipped5 = 0; skipped5 = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
var _a2; var _a;
const tensor3 = tf9.tidy(() => { const tensor3 = tf9.tidy(() => {
if (!(model5 == null ? void 0 : model5.inputs[0].shape)) if (!(model5 == null ? void 0 : model5.inputs[0].shape))
return null; return null;
@ -5329,7 +5337,7 @@ async function predict4(image25, config3) {
let resT; let resT;
if (config3.body.enabled) if (config3.body.enabled)
resT = await (model5 == null ? void 0 : model5.predict(tensor3)); resT = await (model5 == null ? void 0 : model5.predict(tensor3));
last4 = now(); lastTime4 = now();
tf9.dispose(tensor3); tf9.dispose(tensor3);
if (resT) { if (resT) {
cache2.keypoints.length = 0; cache2.keypoints.length = 0;
@ -5339,7 +5347,7 @@ async function predict4(image25, config3) {
tf9.dispose(squeeze8); tf9.dispose(squeeze8);
for (let id = 0; id < stack3.length; id++) { for (let id = 0; id < stack3.length; id++) {
const [x2, y2, partScore] = max2d(stack3[id], config3.body.minConfidence); const [x2, y2, partScore] = max2d(stack3[id], config3.body.minConfidence);
if (partScore > (((_a2 = config3.body) == null ? void 0 : _a2.minConfidence) || 0)) { if (partScore > (((_a = config3.body) == null ? void 0 : _a.minConfidence) || 0)) {
cache2.keypoints.push({ cache2.keypoints.push({
score: Math.round(100 * partScore) / 100, score: Math.round(100 * partScore) / 100,
part: kpt2[id], part: kpt2[id],
@ -5391,9 +5399,9 @@ async function predict4(image25, config3) {
var tf10 = __toModule(require_tfjs_esm()); var tf10 = __toModule(require_tfjs_esm());
var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"]; var annotations = ["angry", "disgust", "fear", "happy", "sad", "surprise", "neutral"];
var model6; var model6;
var last5 = []; var last2 = [];
var lastCount2 = 0; var lastCount2 = 0;
var lastTime2 = 0; var lastTime5 = 0;
var skipped6 = Number.MAX_SAFE_INTEGER; var skipped6 = Number.MAX_SAFE_INTEGER;
var rgb = [0.2989, 0.587, 0.114]; var rgb = [0.2989, 0.587, 0.114];
async function load6(config3) { async function load6(config3) {
@ -5414,9 +5422,11 @@ async function predict5(image25, config3, idx, count2) {
var _a, _b; var _a, _b;
if (!model6) if (!model6)
return null; return null;
if (skipped6 < (((_a = config3.face.emotion) == null ? void 0 : _a.skipFrames) || 0) && (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) <= now() - lastTime2 && config3.skipFrame && lastCount2 === count2 && last5[idx] && last5[idx].length > 0) { const skipFrame = skipped6 < (((_a = config3.face.emotion) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.emotion) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime5;
if (config3.skipAllowed && skipTime && skipFrame && lastCount2 === count2 && last2[idx] && last2[idx].length > 0) {
skipped6++; skipped6++;
return last5[idx]; return last2[idx];
} }
skipped6 = 0; skipped6 = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
@ -5439,7 +5449,7 @@ async function predict5(image25, config3, idx, count2) {
const normalize = tf10.tidy(() => tf10.mul(tf10.sub(grayscale, 0.5), 2)); const normalize = tf10.tidy(() => tf10.mul(tf10.sub(grayscale, 0.5), 2));
tf10.dispose(grayscale); tf10.dispose(grayscale);
const emotionT = await (model6 == null ? void 0 : model6.predict(normalize)); const emotionT = await (model6 == null ? void 0 : model6.predict(normalize));
lastTime2 = now(); lastTime5 = now();
const data = await emotionT.data(); const data = await emotionT.data();
tf10.dispose(emotionT); tf10.dispose(emotionT);
for (let i = 0; i < data.length; i++) { for (let i = 0; i < data.length; i++) {
@ -5449,7 +5459,7 @@ async function predict5(image25, config3, idx, count2) {
obj.sort((a, b) => b.score - a.score); obj.sort((a, b) => b.score - a.score);
tf10.dispose(normalize); tf10.dispose(normalize);
} }
last5[idx] = obj; last2[idx] = obj;
lastCount2 = count2; lastCount2 = count2;
resolve(obj); resolve(obj);
}); });
@ -5596,13 +5606,15 @@ var boxCache = [];
var model8 = null; var model8 = null;
var inputSize5 = 0; var inputSize5 = 0;
var skipped7 = Number.MAX_SAFE_INTEGER; var skipped7 = Number.MAX_SAFE_INTEGER;
var lastTime3 = 0; var lastTime6 = 0;
var detectedFaces = 0; var detectedFaces = 0;
async function predict6(input, config3) { async function predict6(input, config3) {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
if (!config3.skipFrame || (detectedFaces !== ((_a = config3.face.detector) == null ? void 0 : _a.maxDetected) || !((_b = config3.face.mesh) == null ? void 0 : _b.enabled)) && (skipped7 > (((_c = config3.face.detector) == null ? void 0 : _c.skipFrames) || 0) && (((_d = config3.face.description) == null ? void 0 : _d.skipTime) || 0) <= now() - lastTime3)) { const skipTime = (((_a = config3.face.detector) == null ? void 0 : _a.skipTime) || 0) > now() - lastTime6;
const skipFrame = skipped7 < (((_b = config3.face.detector) == null ? void 0 : _b.skipFrames) || 0);
if (!config3.skipAllowed || !skipTime || !skipFrame || detectedFaces === 0) {
const newBoxes2 = await getBoxes(input, config3); const newBoxes2 = await getBoxes(input, config3);
lastTime3 = now(); lastTime6 = now();
boxCache = []; boxCache = [];
for (const possible of newBoxes2.boxes) { for (const possible of newBoxes2.boxes) {
const startPoint = await possible.box.startPoint.data(); const startPoint = await possible.box.startPoint.data();
@ -5638,16 +5650,16 @@ async function predict6(input, config3) {
faceScore: 0, faceScore: 0,
annotations: {} annotations: {}
}; };
if (((_e = config3.face.detector) == null ? void 0 : _e.rotation) && ((_f = config3.face.mesh) == null ? void 0 : _f.enabled) && env.kernels.includes("rotatewithoffset")) { if (((_c = config3.face.detector) == null ? void 0 : _c.rotation) && ((_d = config3.face.mesh) == null ? void 0 : _d.enabled) && env.kernels.includes("rotatewithoffset")) {
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} else { } else {
rotationMatrix = IDENTITY_MATRIX; rotationMatrix = IDENTITY_MATRIX;
const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input, ((_g = config3.face.mesh) == null ? void 0 : _g.enabled) ? [inputSize5, inputSize5] : [size(), size()]); const cut = cutBoxFromImageAndResize({ startPoint: box4.startPoint, endPoint: box4.endPoint }, input, ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) ? [inputSize5, inputSize5] : [size(), size()]);
face5.tensor = tf12.div(cut, 255); face5.tensor = tf12.div(cut, 255);
tf12.dispose(cut); tf12.dispose(cut);
} }
face5.boxScore = Math.round(100 * box4.confidence) / 100; face5.boxScore = Math.round(100 * box4.confidence) / 100;
if (!((_h = config3.face.mesh) == null ? void 0 : _h.enabled)) { if (!((_f = config3.face.mesh) == null ? void 0 : _f.enabled)) {
face5.box = getClampedBox(box4, input); face5.box = getClampedBox(box4, input);
face5.boxRaw = getRawBox(box4, input); face5.boxRaw = getRawBox(box4, input);
face5.score = Math.round(100 * box4.confidence || 0) / 100; face5.score = Math.round(100 * box4.confidence || 0) / 100;
@ -5670,17 +5682,17 @@ async function predict6(input, config3) {
let rawCoords = await coordsReshaped.array(); let rawCoords = await coordsReshaped.array();
tf12.dispose(contourCoords); tf12.dispose(contourCoords);
tf12.dispose(coordsReshaped); tf12.dispose(coordsReshaped);
if (faceConfidence < (((_i = config3.face.detector) == null ? void 0 : _i.minConfidence) || 1)) { if (faceConfidence < (((_g = config3.face.detector) == null ? void 0 : _g.minConfidence) || 1)) {
box4.confidence = faceConfidence; box4.confidence = faceConfidence;
} else { } else {
if ((_j = config3.face.iris) == null ? void 0 : _j.enabled) if ((_h = config3.face.iris) == null ? void 0 : _h.enabled)
rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5); rawCoords = await augmentIris(rawCoords, face5.tensor, config3, inputSize5);
face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5); face5.mesh = transformRawCoords(rawCoords, box4, angle, rotationMatrix, inputSize5);
face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]); face5.meshRaw = face5.mesh.map((pt) => [pt[0] / (input.shape[2] || 0), pt[1] / (input.shape[1] || 0), (pt[2] || 0) / inputSize5]);
box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence }; box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence };
for (const key of Object.keys(meshAnnotations)) for (const key of Object.keys(meshAnnotations))
face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]); face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]);
if (((_k = config3.face.detector) == null ? void 0 : _k.rotation) && config3.face.mesh.enabled && ((_l = config3.face.description) == null ? void 0 : _l.enabled) && env.kernels.includes("rotatewithoffset")) { if (((_i = config3.face.detector) == null ? void 0 : _i.rotation) && config3.face.mesh.enabled && ((_j = config3.face.description) == null ? void 0 : _j.enabled) && env.kernels.includes("rotatewithoffset")) {
tf12.dispose(face5.tensor); tf12.dispose(face5.tensor);
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} }
@ -5694,7 +5706,7 @@ async function predict6(input, config3) {
faces.push(face5); faces.push(face5);
newBoxes.push(box4); newBoxes.push(box4);
} }
if ((_m = config3.face.mesh) == null ? void 0 : _m.enabled) if ((_k = config3.face.mesh) == null ? void 0 : _k.enabled)
boxCache = newBoxes.filter((a) => { boxCache = newBoxes.filter((a) => {
var _a2; var _a2;
return a.confidence > (((_a2 = config3.face.detector) == null ? void 0 : _a2.minConfidence) || 0); return a.confidence > (((_a2 = config3.face.detector) == null ? void 0 : _a2.minConfidence) || 0);
@ -5725,8 +5737,8 @@ var uvmap = UV468;
// src/face/faceres.ts // src/face/faceres.ts
var tf13 = __toModule(require_tfjs_esm()); var tf13 = __toModule(require_tfjs_esm());
var model9; var model9;
var last6 = []; var last3 = [];
var lastTime4 = 0; var lastTime7 = 0;
var lastCount3 = 0; var lastCount3 = 0;
var skipped8 = Number.MAX_SAFE_INTEGER; var skipped8 = Number.MAX_SAFE_INTEGER;
async function load9(config3) { async function load9(config3) {
@ -5762,9 +5774,11 @@ async function predict7(image25, config3, idx, count2) {
var _a, _b, _c, _d; var _a, _b, _c, _d;
if (!model9) if (!model9)
return null; return null;
if (skipped8 < (((_a = config3.face.description) == null ? void 0 : _a.skipFrames) || 0) && (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) <= now() - lastTime4 && config3.skipFrame && lastCount3 === count2 && ((_c = last6[idx]) == null ? void 0 : _c.age) && ((_d = last6[idx]) == null ? void 0 : _d.age) > 0) { const skipFrame = skipped8 < (((_a = config3.face.description) == null ? void 0 : _a.skipFrames) || 0);
const skipTime = (((_b = config3.face.description) == null ? void 0 : _b.skipTime) || 0) > now() - lastTime7;
if (config3.skipAllowed && skipFrame && skipTime && lastCount3 === count2 && ((_c = last3[idx]) == null ? void 0 : _c.age) && ((_d = last3[idx]) == null ? void 0 : _d.age) > 0) {
skipped8++; skipped8++;
return last6[idx]; return last3[idx];
} }
skipped8 = 0; skipped8 = 0;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
@ -5778,7 +5792,7 @@ async function predict7(image25, config3, idx, count2) {
if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) { if ((_a2 = config3.face.description) == null ? void 0 : _a2.enabled) {
const enhanced = enhance(image25); const enhanced = enhance(image25);
const resT = await (model9 == null ? void 0 : model9.predict(enhanced)); const resT = await (model9 == null ? void 0 : model9.predict(enhanced));
lastTime4 = now(); lastTime7 = now();
tf13.dispose(enhanced); tf13.dispose(enhanced);
const genderT = await resT.find((t) => t.shape[1] === 1); const genderT = await resT.find((t) => t.shape[1] === 1);
const gender = await genderT.data(); const gender = await genderT.data();
@ -5798,7 +5812,7 @@ async function predict7(image25, config3, idx, count2) {
obj.descriptor = Array.from(descriptor); obj.descriptor = Array.from(descriptor);
resT.forEach((t) => tf13.dispose(t)); resT.forEach((t) => tf13.dispose(t));
} }
last6[idx] = obj; last3[idx] = obj;
lastCount3 = count2; lastCount3 = count2;
resolve(obj); resolve(obj);
}); });
@ -8953,7 +8967,7 @@ var handBoxEnlargeFactor = 1.65;
var palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2]; var palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2];
var palmLandmarksPalmBase = 0; var palmLandmarksPalmBase = 0;
var palmLandmarksMiddleFingerBase = 2; var palmLandmarksMiddleFingerBase = 2;
var lastTime5 = 0; var lastTime8 = 0;
var HandPipeline = class { var HandPipeline = class {
constructor(handDetector, handPoseModel2) { constructor(handDetector, handPoseModel2) {
__publicField(this, "handDetector"); __publicField(this, "handDetector");
@ -8966,7 +8980,7 @@ var HandPipeline = class {
this.handPoseModel = handPoseModel2; this.handPoseModel = handPoseModel2;
this.inputSize = this.handPoseModel && this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0; this.inputSize = this.handPoseModel && this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0;
this.storedBoxes = []; this.storedBoxes = [];
this.skipped = 0; this.skipped = Number.MAX_SAFE_INTEGER;
this.detectedHands = 0; this.detectedHands = 0;
} }
calculateLandmarksBoundingBox(landmarks) { calculateLandmarksBoundingBox(landmarks) {
@ -9018,11 +9032,13 @@ var HandPipeline = class {
async estimateHands(image25, config3) { async estimateHands(image25, config3) {
let useFreshBox = false; let useFreshBox = false;
let boxes; let boxes;
if (this.skipped === 0 || this.skipped > config3.hand.skipFrames && (config3.hand.skipTime || 0) <= now() - lastTime5 || !config3.hand.landmarks || !config3.skipFrame) { const skipTime = (config3.hand.skipTime || 0) > now() - lastTime8;
const skipFrame = this.skipped < (config3.hand.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
boxes = await this.handDetector.estimateHandBounds(image25, config3); boxes = await this.handDetector.estimateHandBounds(image25, config3);
this.skipped = 0; this.skipped = 0;
} }
if (config3.skipFrame) if (config3.skipAllowed)
this.skipped++; this.skipped++;
if (boxes && boxes.length > 0 && (boxes.length !== this.detectedHands && this.detectedHands !== config3.hand.maxDetected || !config3.hand.landmarks)) { if (boxes && boxes.length > 0 && (boxes.length !== this.detectedHands && this.detectedHands !== config3.hand.maxDetected || !config3.hand.landmarks)) {
this.detectedHands = 0; this.detectedHands = 0;
@ -9047,7 +9063,7 @@ var HandPipeline = class {
tf16.dispose(croppedInput); tf16.dispose(croppedInput);
tf16.dispose(rotatedImage); tf16.dispose(rotatedImage);
const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage); const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage);
lastTime5 = now(); lastTime8 = now();
tf16.dispose(handImage); tf16.dispose(handImage);
const confidence = (await confidenceT.data())[0]; const confidence = (await confidenceT.data())[0];
tf16.dispose(confidenceT); tf16.dispose(confidenceT);
@ -9616,8 +9632,8 @@ var faceIndex = 4;
var boxExpandFact = 1.6; var boxExpandFact = 1.6;
var maxDetectorResolution = 512; var maxDetectorResolution = 512;
var detectorExpandFact = 1.4; var detectorExpandFact = 1.4;
var skipped9 = 0; var skipped9 = Number.MAX_SAFE_INTEGER;
var lastTime6 = 0; var lastTime9 = 0;
var outputSize = [0, 0]; var outputSize = [0, 0];
var cache3 = { var cache3 = {
boxes: [], boxes: [],
@ -9757,17 +9773,21 @@ async function predict9(input, config3) {
return []; return [];
outputSize = [input.shape[2] || 0, input.shape[1] || 0]; outputSize = [input.shape[2] || 0, input.shape[1] || 0];
skipped9++; skipped9++;
if (config3.skipFrame && skipped9 <= (config3.hand.skipFrames || 0) && (config3.hand.skipTime || 0) <= now() - lastTime6) { const skipTime = (config3.hand.skipTime || 0) > now() - lastTime9;
const skipFrame = skipped9 < (config3.hand.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
return cache3.hands; return cache3.hands;
} }
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
if (config3.skipFrame && cache3.hands.length === config3.hand.maxDetected) { const skipTimeExtended = 3 * (config3.hand.skipTime || 0) > now() - lastTime9;
const skipFrameExtended = skipped9 < 3 * (config3.hand.skipFrames || 0);
if (config3.skipAllowed && cache3.hands.length === config3.hand.maxDetected) {
cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3))); cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
} else if (config3.skipFrame && skipped9 < 3 * (config3.hand.skipFrames || 0) && (config3.hand.skipTime || 0) <= 3 * (now() - lastTime6) && cache3.hands.length > 0) { } else if (config3.skipAllowed && skipTimeExtended && skipFrameExtended && cache3.hands.length > 0) {
cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3))); cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
} else { } else {
cache3.boxes = await detectHands(input, config3); cache3.boxes = await detectHands(input, config3);
lastTime6 = now(); lastTime9 = now();
cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3))); cache3.hands = await Promise.all(cache3.boxes.map((handBox) => detectFingers(input, handBox, config3)));
skipped9 = 0; skipped9 = 0;
} }
@ -10066,10 +10086,12 @@ async function parseMultiPose(res, config3, image25, inputBox) {
async function predict10(input, config3) { async function predict10(input, config3) {
if (!model10 || !(model10 == null ? void 0 : model10.inputs[0].shape)) if (!model10 || !(model10 == null ? void 0 : model10.inputs[0].shape))
return []; return [];
if (!config3.skipFrame) if (!config3.skipAllowed)
cache5.boxes.length = 0; cache5.boxes.length = 0;
skipped10++; skipped10++;
if (config3.skipFrame && (skipped10 <= (config3.body.skipFrames || 0) && (config3.body.skipTime || 0) <= now() - cache5.last)) { const skipTime = (config3.body.skipTime || 0) > now() - cache5.last;
const skipFrame = skipped10 < (config3.body.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame) {
return cache5.bodies; return cache5.bodies;
} }
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
@ -10092,8 +10114,8 @@ async function predict10(input, config3) {
// src/object/nanodet.ts // src/object/nanodet.ts
var tf21 = __toModule(require_tfjs_esm()); var tf21 = __toModule(require_tfjs_esm());
var model11; var model11;
var last7 = []; var last4 = [];
var lastTime7 = 0; var lastTime10 = 0;
var skipped11 = Number.MAX_SAFE_INTEGER; var skipped11 = Number.MAX_SAFE_INTEGER;
var scaleBox = 2.5; var scaleBox = 2.5;
async function load12(config3) { async function load12(config3) {
@ -10173,13 +10195,15 @@ async function process4(res, inputSize8, outputShape, config3) {
return results; return results;
} }
async function predict11(image25, config3) { async function predict11(image25, config3) {
if (skipped11 < (config3.object.skipFrames || 0) && (config3.object.skipTime || 0) <= now() - lastTime7 && config3.skipFrame && last7.length > 0) { const skipTime = (config3.object.skipTime || 0) > now() - lastTime10;
const skipFrame = skipped11 < (config3.object.skipFrames || 0);
if (config3.skipAllowed && skipTime && skipFrame && last4.length > 0) {
skipped11++; skipped11++;
return last7; return last4;
} }
skipped11 = 0; skipped11 = 0;
if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last7; return last4;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [image25.shape[2], image25.shape[1]]; const outputSize2 = [image25.shape[2], image25.shape[1]];
const resize = tf21.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false); const resize = tf21.image.resizeBilinear(image25, [model11.inputSize, model11.inputSize], false);
@ -10190,10 +10214,10 @@ async function predict11(image25, config3) {
let objectT; let objectT;
if (config3.object.enabled) if (config3.object.enabled)
objectT = await model11.predict(transpose); objectT = await model11.predict(transpose);
lastTime7 = now(); lastTime10 = now();
tf21.dispose(transpose); tf21.dispose(transpose);
const obj = await process4(objectT, model11.inputSize, outputSize2, config3); const obj = await process4(objectT, model11.inputSize, outputSize2, config3);
last7 = obj; last4 = obj;
resolve(obj); resolve(obj);
}); });
} }
@ -11393,7 +11417,7 @@ async function canvas2(input, output) {
async function all(inCanvas2, result, drawOptions) { async function all(inCanvas2, result, drawOptions) {
if (!result || !result.performance || !result || !inCanvas2) if (!result || !result.performance || !result || !inCanvas2)
return null; return null;
const timestamp = now(); const timeStamp = now();
const localOptions = mergeDeep(options2, drawOptions); const localOptions = mergeDeep(options2, drawOptions);
const promise = Promise.all([ const promise = Promise.all([
face(inCanvas2, result.face, localOptions), face(inCanvas2, result.face, localOptions),
@ -11402,7 +11426,7 @@ async function all(inCanvas2, result, drawOptions) {
object(inCanvas2, result.object, localOptions), object(inCanvas2, result.object, localOptions),
gesture(inCanvas2, result.gesture, localOptions) gesture(inCanvas2, result.gesture, localOptions)
]); ]);
result.performance.draw = Math.trunc(now() - timestamp); result.performance.draw = env.perfadd ? (result.performance.draw || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
return promise; return promise;
} }
@ -11530,7 +11554,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:face"; parent.state = "run:face";
timeStamp = now(); timeStamp = now();
const faces = await predict6(input, parent.config); const faces = await predict6(input, parent.config);
parent.performance.face = Math.trunc(now() - timeStamp); parent.performance.face = env.perfadd ? (parent.performance.face || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (!input.shape || input.shape.length !== 4) if (!input.shape || input.shape.length !== 4)
return []; return [];
if (!faces) if (!faces)
@ -11549,7 +11573,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:emotion"; parent.state = "run:emotion";
timeStamp = now(); timeStamp = now();
emotionRes = parent.config.face.emotion.enabled ? await predict5(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null; emotionRes = parent.config.face.emotion.enabled ? await predict5(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null;
parent.performance.emotion = Math.trunc(now() - timeStamp); parent.performance.emotion = env.perfadd ? (parent.performance.emotion || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End Emotion:"); parent.analyze("End Emotion:");
parent.analyze("Start AntiSpoof:"); parent.analyze("Start AntiSpoof:");
@ -11559,7 +11583,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:antispoof"; parent.state = "run:antispoof";
timeStamp = now(); timeStamp = now();
antispoofRes = parent.config.face.antispoof.enabled ? await predict(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null; antispoofRes = parent.config.face.antispoof.enabled ? await predict(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null;
parent.performance.antispoof = Math.trunc(now() - timeStamp); parent.performance.antispoof = env.perfadd ? (parent.performance.antispoof || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End AntiSpoof:"); parent.analyze("End AntiSpoof:");
parent.analyze("Start Description:"); parent.analyze("Start Description:");
@ -11569,7 +11593,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:description"; parent.state = "run:description";
timeStamp = now(); timeStamp = now();
descRes = parent.config.face.description.enabled ? await predict7(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null; descRes = parent.config.face.description.enabled ? await predict7(faces[i].tensor || tf26.tensor([]), parent.config, i, faces.length) : null;
parent.performance.embedding = Math.trunc(now() - timeStamp); parent.performance.embedding = env.perfadd ? (parent.performance.embedding || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
} }
parent.analyze("End Description:"); parent.analyze("End Description:");
if (parent.config.async) { if (parent.config.async) {
@ -12888,7 +12912,7 @@ var Human = class {
const count2 = Object.values(this.models).filter((model14) => model14).length; const count2 = Object.values(this.models).filter((model14) => model14).length;
if (userConfig) if (userConfig)
this.config = mergeDeep(this.config, userConfig); this.config = mergeDeep(this.config, userConfig);
if (env.initial) { if (this.env.initial) {
if (this.config.debug) if (this.config.debug)
log(`version: ${this.version}`); log(`version: ${this.version}`);
if (this.config.debug) if (this.config.debug)
@ -12904,9 +12928,9 @@ var Human = class {
} }
} }
await load15(this); await load15(this);
if (env.initial && this.config.debug) if (this.env.initial && this.config.debug)
log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors"); log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors");
env.initial = false; this.env.initial = false;
const loaded = Object.values(this.models).filter((model14) => model14).length; const loaded = Object.values(this.models).filter((model14) => model14).length;
if (loaded !== count2) { if (loaded !== count2) {
await validate2(this); await validate2(this);
@ -12914,7 +12938,7 @@ var Human = class {
} }
const current = Math.trunc(now() - timeStamp); const current = Math.trunc(now() - timeStamp);
if (current > (this.performance.load || 0)) if (current > (this.performance.load || 0))
this.performance.load = current; this.performance.load = this.env.perfadd ? (this.performance.load || 0) + current : current;
} }
next(result = this.result) { next(result = this.result) {
return calc2(result, this.config); return calc2(result, this.config);
@ -12928,7 +12952,6 @@ var Human = class {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
this.state = "config"; this.state = "config";
let timeStamp; let timeStamp;
let elapsedTime;
this.config = mergeDeep(this.config, userConfig); this.config = mergeDeep(this.config, userConfig);
this.state = "check"; this.state = "check";
const error = __privateGet(this, _sanity).call(this, input); const error = __privateGet(this, _sanity).call(this, input);
@ -12943,7 +12966,7 @@ var Human = class {
this.state = "image"; this.state = "image";
const img = process2(input, this.config); const img = process2(input, this.config);
this.process = img; this.process = img;
this.performance.image = Math.trunc(now() - timeStamp); this.performance.image = this.env.perfadd ? (this.performance.image || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Get Image:"); this.analyze("Get Image:");
if (!img.tensor) { if (!img.tensor) {
if (this.config.debug) if (this.config.debug)
@ -12953,15 +12976,15 @@ var Human = class {
} }
this.emit("image"); this.emit("image");
timeStamp = now(); timeStamp = now();
this.config.skipFrame = await skip(this.config, img.tensor); this.config.skipAllowed = await skip(this.config, img.tensor);
if (!this.performance.frames) if (!this.performance.frames)
this.performance.frames = 0; this.performance.frames = 0;
if (!this.performance.cached) if (!this.performance.cached)
this.performance.cached = 0; this.performance.cached = 0;
this.performance.frames++; this.performance.frames++;
if (this.config.skipFrame) if (this.config.skipAllowed)
this.performance.cached++; this.performance.cached++;
this.performance.changed = Math.trunc(now() - timeStamp); this.performance.changed = this.env.perfadd ? (this.performance.changed || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze("Check Changed:"); this.analyze("Check Changed:");
let faceRes = []; let faceRes = [];
let bodyRes = []; let bodyRes = [];
@ -12975,9 +12998,7 @@ var Human = class {
} else { } else {
timeStamp = now(); timeStamp = now();
faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.face = this.env.perfadd ? (this.performance.face || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.face = elapsedTime;
} }
if (this.config.async && (this.config.body.maxDetected === -1 || this.config.hand.maxDetected === -1)) if (this.config.async && (this.config.body.maxDetected === -1 || this.config.hand.maxDetected === -1))
faceRes = await faceRes; faceRes = await faceRes;
@ -13005,9 +13026,7 @@ var Human = class {
bodyRes = this.config.body.enabled ? await predict4(img.tensor, bodyConfig) : []; bodyRes = this.config.body.enabled ? await predict4(img.tensor, bodyConfig) : [];
else if ((_h = this.config.body.modelPath) == null ? void 0 : _h.includes("movenet")) else if ((_h = this.config.body.modelPath) == null ? void 0 : _h.includes("movenet"))
bodyRes = this.config.body.enabled ? await predict10(img.tensor, bodyConfig) : []; bodyRes = this.config.body.enabled ? await predict10(img.tensor, bodyConfig) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.body = this.env.perfadd ? (this.performance.body || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.body = elapsedTime;
} }
this.analyze("End Body:"); this.analyze("End Body:");
this.analyze("Start Hand:"); this.analyze("Start Hand:");
@ -13026,9 +13045,7 @@ var Human = class {
handRes = this.config.hand.enabled ? await predict8(img.tensor, handConfig) : []; handRes = this.config.hand.enabled ? await predict8(img.tensor, handConfig) : [];
else if ((_p = (_o = this.config.hand.detector) == null ? void 0 : _o.modelPath) == null ? void 0 : _p.includes("handtrack")) else if ((_p = (_o = this.config.hand.detector) == null ? void 0 : _o.modelPath) == null ? void 0 : _p.includes("handtrack"))
handRes = this.config.hand.enabled ? await predict9(img.tensor, handConfig) : []; handRes = this.config.hand.enabled ? await predict9(img.tensor, handConfig) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.hand = this.env.perfadd ? (this.performance.hand || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.hand = elapsedTime;
} }
this.analyze("End Hand:"); this.analyze("End Hand:");
this.analyze("Start Object:"); this.analyze("Start Object:");
@ -13046,9 +13063,7 @@ var Human = class {
objectRes = this.config.object.enabled ? await predict11(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? await predict11(img.tensor, this.config) : [];
else if ((_t = this.config.object.modelPath) == null ? void 0 : _t.includes("centernet")) else if ((_t = this.config.object.modelPath) == null ? void 0 : _t.includes("centernet"))
objectRes = this.config.object.enabled ? await predict3(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? await predict3(img.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp); this.performance.object = this.env.perfadd ? (this.performance.object || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.object = elapsedTime;
} }
this.analyze("End Object:"); this.analyze("End Object:");
this.state = "detect:await"; this.state = "detect:await";
@ -13060,7 +13075,7 @@ var Human = class {
timeStamp = now(); timeStamp = now();
gestureRes = [...face2(faceRes), ...body2(bodyRes), ...hand2(handRes), ...iris3(faceRes)]; gestureRes = [...face2(faceRes), ...body2(bodyRes), ...hand2(handRes), ...iris3(faceRes)];
if (!this.config.async) if (!this.config.async)
this.performance.gesture = Math.trunc(now() - timeStamp); this.performance.gesture = this.env.perfadd ? (this.performance.gesture || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
else if (this.performance.gesture) else if (this.performance.gesture)
delete this.performance.gesture; delete this.performance.gesture;
} }
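Every cached module in the bundle above now uses the same gate: the old single-expression check against `config.skipFrame` is split into explicit `skipTime` and `skipFrame` booleans and gated by the new `config.skipAllowed` flag, so a cached result is reused only while both the frame budget and the time budget still hold. A distilled sketch of that gate follows; `runModel`, `cachedResult`, and `CacheConfig` are placeholders, and `performance.now()` stands in for the library's `now()` helper.

```ts
// Distilled caching gate shared by the per-module predict() functions above.
// Placeholder names; this illustrates the pattern, it is not library code.
interface CacheConfig { skipAllowed: boolean; skipTime?: number; skipFrames?: number }

let skipped = Number.MAX_SAFE_INTEGER; // forces inference on the first call
let lastTime = 0;
let cachedResult: object | null = null;

const runModel = async (input: object): Promise<object> => ({ ...input }); // stand-in for actual inference

async function predictWithCache(input: object, config: CacheConfig): Promise<object> {
  const skipTime = (config.skipTime || 0) > (performance.now() - lastTime); // time budget still open
  const skipFrame = skipped < (config.skipFrames || 0);                     // frame budget still open
  if (config.skipAllowed && skipTime && skipFrame && cachedResult) {
    skipped++;
    return cachedResult; // reuse last result
  }
  cachedResult = await runModel(input);
  lastTime = performance.now();
  skipped = 0;
  return cachedResult;
}
```

Because both conditions must hold, `skipTime` caps reuse in milliseconds and `skipFrames` caps it in frames; whichever budget runs out first forces a fresh inference, and setting `skipAllowed` to false, as the main detect loop does via its frame-change check, bypasses the cache entirely.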


@ -16,7 +16,7 @@ let skipped = Number.MAX_SAFE_INTEGER;
let outputNodes: string[]; // different for lite/full/heavy let outputNodes: string[]; // different for lite/full/heavy
let cache: BodyResult | null = null; let cache: BodyResult | null = null;
let padding: [number, number][] = [[0, 0], [0, 0], [0, 0], [0, 0]]; let padding: [number, number][] = [[0, 0], [0, 0], [0, 0], [0, 0]];
let last = 0; let lastTime = 0;
export async function loadDetect(config: Config): Promise<GraphModel> { export async function loadDetect(config: Config): Promise<GraphModel> {
if (env.initial) models[0] = null; if (env.initial) models[0] = null;
@ -136,11 +136,13 @@ async function detectParts(input: Tensor, config: Config, outputSize: [number, n
export async function predict(input: Tensor, config: Config): Promise<BodyResult[]> { export async function predict(input: Tensor, config: Config): Promise<BodyResult[]> {
const outputSize: [number, number] = [input.shape[2] || 0, input.shape[1] || 0]; const outputSize: [number, number] = [input.shape[2] || 0, input.shape[1] || 0];
if ((skipped < (config.body.skipFrames || 0)) && ((config.body.skipTime || 0) <= (now() - last)) && config.skipFrame && cache !== null) { const skipTime = (config.body.skipTime || 0) > (now() - lastTime);
const skipFrame = skipped < (config.body.skipFrames || 0);
if (config.skipAllowed && skipTime && skipFrame && cache !== null) {
skipped++; skipped++;
} else { } else {
cache = await detectParts(input, config, outputSize); cache = await detectParts(input, config, outputSize);
last = now(); lastTime = now();
skipped = 0; skipped = 0;
} }
if (cache) return [cache]; if (cache) return [cache];


@ -13,7 +13,7 @@ import type { Config } from '../config';
import { env } from '../util/env'; import { env } from '../util/env';
let model: GraphModel | null; let model: GraphModel | null;
let last = 0; let lastTime = 0;
const cache: BodyResult = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} }; const cache: BodyResult = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} };
// const keypoints: Array<BodyKeypoint> = []; // const keypoints: Array<BodyKeypoint> = [];
@ -50,7 +50,9 @@ function max2d(inputs, minScore) {
} }
export async function predict(image: Tensor, config: Config): Promise<BodyResult[]> { export async function predict(image: Tensor, config: Config): Promise<BodyResult[]> {
if ((skipped < (config.body?.skipFrames || 0)) && config.skipFrame && Object.keys(cache.keypoints).length > 0 && ((config.body.skipTime || 0) <= (now() - last))) { const skipTime = (config.body.skipTime || 0) > (now() - lastTime);
const skipFrame = skipped < (config.body.skipFrames || 0);
if (config.skipAllowed && skipTime && skipFrame && Object.keys(cache.keypoints).length > 0) {
skipped++; skipped++;
return [cache]; return [cache];
} }
@ -66,7 +68,7 @@ export async function predict(image: Tensor, config: Config): Promise<BodyResult
let resT; let resT;
if (config.body.enabled) resT = await model?.predict(tensor); if (config.body.enabled) resT = await model?.predict(tensor);
last = now(); lastTime = now();
tf.dispose(tensor); tf.dispose(tensor);
if (resT) { if (resT) {


@ -132,9 +132,11 @@ async function parseMultiPose(res, config, image, inputBox) {
export async function predict(input: Tensor, config: Config): Promise<BodyResult[]> { export async function predict(input: Tensor, config: Config): Promise<BodyResult[]> {
if (!model || !model?.inputs[0].shape) return []; // something is wrong with the model if (!model || !model?.inputs[0].shape) return []; // something is wrong with the model
if (!config.skipFrame) cache.boxes.length = 0; // allowed to use cache or not if (!config.skipAllowed) cache.boxes.length = 0; // allowed to use cache or not
skipped++; // increment skip frames skipped++; // increment skip frames
if (config.skipFrame && (skipped <= (config.body.skipFrames || 0) && ((config.body.skipTime || 0) <= (now() - cache.last)))) { const skipTime = (config.body.skipTime || 0) > (now() - cache.last);
const skipFrame = skipped < (config.body.skipFrames || 0);
if (config.skipAllowed && skipTime && skipFrame) {
return cache.bodies; // return cached results without running anything return cache.bodies; // return cached results without running anything
} }
return new Promise(async (resolve) => { return new Promise(async (resolve) => {


@ -255,7 +255,7 @@ export interface Config {
cacheSensitivity: number; cacheSensitivity: number;
/** Internal Variable */ /** Internal Variable */
skipFrame: boolean; skipAllowed: boolean;
/** Run input through image filters before inference /** Run input through image filters before inference
* - image filters run with near-zero latency as they are executed on the GPU * - image filters run with near-zero latency as they are executed on the GPU
@ -302,10 +302,10 @@ const config: Config = {
// warmup pre-initializes all models for faster inference but can take // warmup pre-initializes all models for faster inference but can take
// significant time on startup // significant time on startup
// only used for `webgl` and `humangl` backends // only used for `webgl` and `humangl` backends
cacheSensitivity: 0.75, // cache sensitivity cacheSensitivity: 0.70, // cache sensitivity
// values 0..1 where 0.01 means reset cache if input changed more than 1% // values 0..1 where 0.01 means reset cache if input changed more than 1%
// set to 0 to disable caching // set to 0 to disable caching
skipFrame: false, // internal & dynamic skipAllowed: false, // internal & dynamic
filter: { // run input through image filters before inference filter: { // run input through image filters before inference
// image filters run with near-zero latency as they are executed on the GPU // image filters run with near-zero latency as they are executed on the GPU
enabled: true, // enable image pre-processing filters enabled: true, // enable image pre-processing filters
@ -347,9 +347,9 @@ const config: Config = {
// this parameter is not valid in nodejs // this parameter is not valid in nodejs
maxDetected: 1, // maximum number of faces detected in the input maxDetected: 1, // maximum number of faces detected in the input
// should be set to the minimum number for performance // should be set to the minimum number for performance
skipFrames: 11, // how many max frames to go without re-running the face bounding box detector skipFrames: 99, // how many max frames to go without re-running the face bounding box detector
// only used when cacheSensitivity is not zero // only used when cacheSensitivity is not zero
skipTime: 2000, // how many ms to go without re-running the face bounding box detector skipTime: 2500, // how many ms to go without re-running the face bounding box detector
// only used when cacheSensitivity is not zero // only used when cacheSensitivity is not zero
minConfidence: 0.2, // threshold for discarding a prediction minConfidence: 0.2, // threshold for discarding a prediction
iouThreshold: 0.1, // ammount of overlap between two detected objects before one object is removed iouThreshold: 0.1, // ammount of overlap between two detected objects before one object is removed
@ -371,9 +371,9 @@ const config: Config = {
emotion: { emotion: {
enabled: true, enabled: true,
minConfidence: 0.1, // threshold for discarding a prediction minConfidence: 0.1, // threshold for discarding a prediction
skipFrames: 12, // how max many frames to go without re-running the detector skipFrames: 99, // how max many frames to go without re-running the detector
// only used when cacheSensitivity is not zero // only used when cacheSensitivity is not zero
skipTime: 2000, // how many ms to go without re-running the face bounding box detector skipTime: 1500, // how many ms to go without re-running the face bounding box detector
// only used when cacheSensitivity is not zero // only used when cacheSensitivity is not zero
modelPath: 'emotion.json', // face emotion model, can be absolute path or relative to modelBasePath modelPath: 'emotion.json', // face emotion model, can be absolute path or relative to modelBasePath
}, },
@ -383,18 +383,18 @@ const config: Config = {
// recommended to enable detector.rotation and mesh.enabled // recommended to enable detector.rotation and mesh.enabled
modelPath: 'faceres.json', // face description model modelPath: 'faceres.json', // face description model
// can be either absolute path or relative to modelBasePath // can be either absolute path or relative to modelBasePath
skipFrames: 13, // how many max frames to go without re-running the detector skipFrames: 99, // how many max frames to go without re-running the detector
// only used when cacheSensitivity is not zero // only used when cacheSensitivity is not zero
skipTime: 2000, // how many ms to go without re-running the face bounding box detector skipTime: 3000, // how many ms to go without re-running the face bounding box detector
// only used when cacheSensitivity is not zero // only used when cacheSensitivity is not zero
minConfidence: 0.1, // threshold for discarding a prediction minConfidence: 0.1, // threshold for discarding a prediction
}, },
antispoof: { antispoof: {
enabled: false, enabled: false,
skipFrames: 14, // how max many frames to go without re-running the detector skipFrames: 99, // how max many frames to go without re-running the detector
// only used when cacheSensitivity is not zero // only used when cacheSensitivity is not zero
skipTime: 2000, // how many ms to go without re-running the face bounding box detector skipTime: 4000, // how many ms to go without re-running the face bounding box detector
// only used when cacheSensitivity is not zero // only used when cacheSensitivity is not zero
modelPath: 'antispoof.json', // face description model modelPath: 'antispoof.json', // face description model
// can be either absolute path or relative to modelBasePath // can be either absolute path or relative to modelBasePath
@ -415,7 +415,7 @@ const config: Config = {
minConfidence: 0.3, // threshold for discarding a prediction minConfidence: 0.3, // threshold for discarding a prediction
skipFrames: 1, // how many max frames to go without re-running the detector skipFrames: 1, // how many max frames to go without re-running the detector
// only used when cacheSensitivity is not zero // only used when cacheSensitivity is not zero
skipTime: 2000, // how many ms to go without re-running the face bounding box detector skipTime: 200, // how many ms to go without re-running the face bounding box detector
// only used when cacheSensitivity is not zero // only used when cacheSensitivity is not zero
}, },
@ -424,7 +424,7 @@ const config: Config = {
rotation: true, // use best-guess rotated hand image or just box with rotation as-is rotation: true, // use best-guess rotated hand image or just box with rotation as-is
// false means higher performance, but incorrect finger mapping if hand is inverted // false means higher performance, but incorrect finger mapping if hand is inverted
// only valid for `handdetect` variation // only valid for `handdetect` variation
skipFrames: 2, // how many max frames to go without re-running the hand bounding box detector skipFrames: 99, // how many max frames to go without re-running the hand bounding box detector
// only used when cacheSensitivity is not zero // only used when cacheSensitivity is not zero
skipTime: 2000, // how many ms to go without re-running the face bounding box detector skipTime: 2000, // how many ms to go without re-running the face bounding box detector
// only used when cacheSensitivity is not zero // only used when cacheSensitivity is not zero
@ -450,9 +450,9 @@ const config: Config = {
minConfidence: 0.2, // threshold for discarding a prediction minConfidence: 0.2, // threshold for discarding a prediction
iouThreshold: 0.4, // ammount of overlap between two detected objects before one object is removed iouThreshold: 0.4, // ammount of overlap between two detected objects before one object is removed
maxDetected: 10, // maximum number of objects detected in the input maxDetected: 10, // maximum number of objects detected in the input
skipFrames: 15, // how many max frames to go without re-running the detector skipFrames: 99, // how many max frames to go without re-running the detector
// only used when cacheSensitivity is not zero // only used when cacheSensitivity is not zero
skipTime: 2000, // how many ms to go without re-running the face bounding box detector skipTime: 1000, // how many ms to go without re-running object detector
// only used when cacheSensitivity is not zero // only used when cacheSensitivity is not zero
}, },
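The config.ts diff above retunes the caching defaults so that every cached module has both a frame budget (`skipFrames`, now uniformly 99 for most modules) and a per-module time budget (`skipTime`). A hedged sketch of how these defaults could be overridden follows; only the caching-related keys are shown, and the way the object is passed to the library (constructor or per-call) is assumed rather than taken from this diff.

```ts
// Illustrative partial configuration for the time-based caching knobs above.
// Key names and values come from the config.ts diff; how this object is
// consumed by the library is assumed, not shown in this commit.
const cachingOverrides = {
  cacheSensitivity: 0.70, // 0..1 input-change threshold; 0 disables caching
  face: {
    detector:    { skipFrames: 99, skipTime: 2500 }, // re-run face detector at least every 2.5 s
    emotion:     { skipFrames: 99, skipTime: 1500 },
    description: { skipFrames: 99, skipTime: 3000 },
    antispoof:   { skipFrames: 99, skipTime: 4000 },
  },
  body:   { skipFrames: 1,  skipTime: 200 },  // body effectively re-runs every frame
  hand:   { skipFrames: 99, skipTime: 2000 },
  object: { skipFrames: 99, skipTime: 1000 },
};
```

At a typical 30 fps webcam rate, 99 frames is roughly 3.3 s, so for most modules the time budget expires first (for example, face description re-runs after about 3 s because `skipTime: 3000` is reached before the 99-frame limit); the practical effect is that cache lifetime no longer depends on the input frame rate.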


@ -12,7 +12,7 @@ let model: GraphModel | null;
const cached: Array<number> = []; const cached: Array<number> = [];
let skipped = Number.MAX_SAFE_INTEGER; let skipped = Number.MAX_SAFE_INTEGER;
let lastCount = 0; let lastCount = 0;
let last = 0; let lastTime = 0;
export async function load(config: Config): Promise<GraphModel> { export async function load(config: Config): Promise<GraphModel> {
if (env.initial) model = null; if (env.initial) model = null;
@ -26,7 +26,9 @@ export async function load(config: Config): Promise<GraphModel> {
export async function predict(image: Tensor, config: Config, idx, count) { export async function predict(image: Tensor, config: Config, idx, count) {
if (!model) return null; if (!model) return null;
if ((skipped < (config.face.antispoof?.skipFrames || 0)) && ((config.face.antispoof?.skipTime || 0) <= (now() - last)) && config.skipFrame && (lastCount === count) && cached[idx]) { const skipTime = (config.face.antispoof?.skipTime || 0) > (now() - lastTime);
const skipFrame = skipped < (config.face.antispoof?.skipFrames || 0);
if (config.skipAllowed && skipTime && skipFrame && (lastCount === count) && cached[idx]) {
skipped++; skipped++;
return cached[idx]; return cached[idx];
} }
@ -37,7 +39,7 @@ export async function predict(image: Tensor, config: Config, idx, count) {
const num = (await res.data())[0]; const num = (await res.data())[0];
cached[idx] = Math.round(100 * num) / 100; cached[idx] = Math.round(100 * num) / 100;
lastCount = count; lastCount = count;
last = now(); lastTime = now();
tf.dispose([resize, res]); tf.dispose([resize, res]);
resolve(cached[idx]); resolve(cached[idx]);
}); });
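For face-attribute modules such as the antispoof model above (and, in the same way, emotion and faceres), the cache is additionally keyed by face: a score is stored per face index and reused only while the number of detected faces is unchanged. The sketch below shows just that per-face part, with the time/frame gate from the earlier sketch omitted for brevity; `scoreFace` and the function name are placeholders for the actual inference.

```ts
// Per-face result caching as used by antispoof/emotion/faceres: one cached
// entry per face index, invalidated when the detected face count changes.
// scoreFace is a stand-in for the real model call; placeholder names only.
const cachedScores: number[] = [];
let lastFaceCount = 0;

async function antispoofScore(idx: number, count: number, skipAllowed: boolean, scoreFace: () => Promise<number>): Promise<number> {
  if (skipAllowed && lastFaceCount === count && cachedScores[idx] !== undefined) {
    return cachedScores[idx]; // same face slot, same face count: reuse
  }
  cachedScores[idx] = Math.round(100 * await scoreFace()) / 100; // same rounding as the source
  lastFaceCount = count;
  return cachedScores[idx];
}
```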


@@ -4,6 +4,7 @@
  */
 import { log, now } from '../util/util';
+import { env } from '../util/env';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as facemesh from './facemesh';
 import * as emotion from '../gear/emotion';
@@ -29,7 +30,7 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
   timeStamp = now();
   const faces = await facemesh.predict(input, parent.config);
-  parent.performance.face = Math.trunc(now() - timeStamp);
+  parent.performance.face = env.perfadd ? (parent.performance.face || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
   if (!input.shape || input.shape.length !== 4) return [];
   if (!faces) return [];
   // for (const face of faces) {
@@ -53,7 +54,7 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
   parent.state = 'run:emotion';
   timeStamp = now();
   emotionRes = parent.config.face.emotion.enabled ? await emotion.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;
-  parent.performance.emotion = Math.trunc(now() - timeStamp);
+  parent.performance.emotion = env.perfadd ? (parent.performance.emotion || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
   }
   parent.analyze('End Emotion:');
@@ -65,7 +66,7 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
   parent.state = 'run:antispoof';
   timeStamp = now();
   antispoofRes = parent.config.face.antispoof.enabled ? await antispoof.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;
-  parent.performance.antispoof = Math.trunc(now() - timeStamp);
+  parent.performance.antispoof = env.perfadd ? (parent.performance.antispoof || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
   }
   parent.analyze('End AntiSpoof:');
@@ -91,7 +92,7 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
   parent.state = 'run:description';
   timeStamp = now();
   descRes = parent.config.face.description.enabled ? await faceres.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;
-  parent.performance.embedding = Math.trunc(now() - timeStamp);
+  parent.performance.embedding = env.perfadd ? (parent.performance.embedding || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
   }
   parent.analyze('End Description:');
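
The repeated ternary above implements one rule: when `env.perfadd` is set, timing counters accumulate across calls; otherwise each call overwrites the previous value. A minimal helper sketch of that rule (names are placeholders, not the library's internals):

```ts
// sketch: additive vs. instant performance counters
const performanceCounters: Record<string, number> = {};

function recordTiming(name: string, elapsedMs: number, perfadd: boolean): void {
  performanceCounters[name] = perfadd
    ? (performanceCounters[name] || 0) + Math.trunc(elapsedMs) // accumulate across frames
    : Math.trunc(elapsedMs);                                   // keep only the latest measurement
}

// usage sketch: recordTiming('face', performance.now() - startTime, true);
```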

View File

@@ -28,7 +28,10 @@ let detectedFaces = 0;
 export async function predict(input: Tensor, config: Config): Promise<FaceResult[]> {
   // reset cached boxes
-  if (!config.skipFrame || (((detectedFaces !== config.face.detector?.maxDetected) || !config.face.mesh?.enabled)) && (skipped > (config.face.detector?.skipFrames || 0) && ((config.face.description?.skipTime || 0) <= (now() - lastTime)))) {
+  const skipTime = (config.face.detector?.skipTime || 0) > (now() - lastTime);
+  const skipFrame = skipped < (config.face.detector?.skipFrames || 0);
+  if (!config.skipAllowed || !skipTime || !skipFrame || detectedFaces === 0) {
     const newBoxes = await blazeface.getBoxes(input, config); // get results from blazeface detector
     lastTime = now();
     boxCache = []; // empty cache

View File

@@ -91,7 +91,9 @@ export function enhance(input): Tensor {
 export async function predict(image: Tensor, config: Config, idx, count) {
   if (!model) return null;
-  if ((skipped < (config.face.description?.skipFrames || 0)) && ((config.face.description?.skipTime || 0) <= (now() - lastTime)) && config.skipFrame && (lastCount === count) && last[idx]?.age && (last[idx]?.age > 0)) {
+  const skipFrame = skipped < (config.face.description?.skipFrames || 0);
+  const skipTime = (config.face.description?.skipTime || 0) > (now() - lastTime);
+  if (config.skipAllowed && skipFrame && skipTime && (lastCount === count) && last[idx]?.age && (last[idx]?.age > 0)) {
    skipped++;
    return last[idx];
  }

View File

@@ -33,7 +33,9 @@ export async function load(config: Config): Promise<GraphModel> {
 export async function predict(image: Tensor, config: Config, idx, count) {
   if (!model) return null;
-  if ((skipped < (config.face.emotion?.skipFrames || 0)) && ((config.face.emotion?.skipTime || 0) <= (now() - lastTime)) && config.skipFrame && (lastCount === count) && last[idx] && (last[idx].length > 0)) {
+  const skipFrame = skipped < (config.face.emotion?.skipFrames || 0);
+  const skipTime = (config.face.emotion?.skipTime || 0) > (now() - lastTime);
+  if (config.skipAllowed && skipTime && skipFrame && (lastCount === count) && last[idx] && (last[idx].length > 0)) {
    skipped++;
    return last[idx];
  }

View File

@@ -34,7 +34,11 @@ export async function load(config: Config | any) {
 export async function predict(image: Tensor, config: Config) {
   if (!model) return null;
   // @ts-ignore config disabled
-  if ((skipped < config.face.agegenderrace?.skipFrames) && ((config.face.agegenderrace?.skipTime || 0) <= (now() - lastTime)) && config.skipFrame && last.age && (last.age > 0)) {
+  const skipFrame = skipped < (config.face.agegenderrace?.skipFrames || 0);
+  // @ts-ignore config disabled
+  const skipTime = (config.face.agegenderrace?.skipTime || 0) > (now() - lastTime);
+  // @ts-ignore config disabled
+  if (config.skipAllowed && skipTime && skipFrame && last.age && (last.age > 0)) {
    skipped++;
    return last;
  }

View File

@@ -33,7 +33,9 @@ export async function load(config: Config | any) {
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function predict(image: Tensor, config: Config | any) {
   if (!model) return null;
-  if ((skipped < config.face.age.skipFrames) && ((config.face.age.skipTime || 0) <= (now() - lastTime)) && config.skipFrame && last.age && (last.age > 0)) {
+  const skipTime = (config.face.age?.skipTime || 0) > (now() - lastTime);
+  const skipFrame = skipped < (config.face.age?.skipFrames || 0);
+  if (config.skipAllowed && skipTime && skipFrame && last.age && (last.age > 0)) {
    skipped++;
    return last;
  }

View File

@@ -36,7 +36,9 @@ export async function load(config: Config | any) {
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function predict(image: Tensor, config: Config | any) {
   if (!model) return null;
-  if ((skipped < config.face.gender.skipFrames) && ((config.face.gender.skipTime || 0) <= (now() - lastTime)) && config.skipFrame && last.gender !== '') {
+  const skipTime = (config.face.gender?.skipTime || 0) > (now() - lastTime);
+  const skipFrame = skipped < (config.face.gender?.skipFrames || 0);
+  if (config.skipAllowed && skipTime && skipFrame && last.gender !== '') {
    skipped++;
    return last;
  }

View File

@@ -30,7 +30,7 @@ export class HandPipeline {
   this.handPoseModel = handPoseModel;
   this.inputSize = this.handPoseModel && this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0;
   this.storedBoxes = [];
-  this.skipped = 0;
+  this.skipped = Number.MAX_SAFE_INTEGER;
   this.detectedHands = 0;
 }
@@ -88,15 +88,15 @@
 async estimateHands(image, config) {
   let useFreshBox = false;
-  // run new detector every skipFrames unless we only want box to start with
+  // run new detector every skipFrames
   let boxes;
+  const skipTime = (config.hand.skipTime || 0) > (now() - lastTime);
-  // console.log('handpipeline:estimateHands:skip criteria', this.skipped, config.hand.skipFrames, !config.hand.landmarks, !config.skipFrame); // should skip hand detector?
+  const skipFrame = this.skipped < (config.hand.skipFrames || 0);
-  if ((this.skipped === 0) || ((this.skipped > config.hand.skipFrames) && ((config.hand.skipTime || 0) <= (now() - lastTime))) || !config.hand.landmarks || !config.skipFrame) {
+  if (config.skipAllowed && skipTime && skipFrame) {
     boxes = await this.handDetector.estimateHandBounds(image, config);
     this.skipped = 0;
   }
-  if (config.skipFrame) this.skipped++;
+  if (config.skipAllowed) this.skipped++;
   // if detector result count doesn't match current working set, use it to reset current working set
   if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.hand.maxDetected) || !config.hand.landmarks)) {

View File

@@ -28,7 +28,7 @@ const boxExpandFact = 1.6;
 const maxDetectorResolution = 512;
 const detectorExpandFact = 1.4;
-let skipped = 0;
+let skipped = Number.MAX_SAFE_INTEGER;
 let lastTime = 0;
 let outputSize: [number, number] = [0, 0];
@@ -183,23 +183,20 @@ async function detectFingers(input: Tensor, h: HandDetectResult, config: Config)
 }
 export async function predict(input: Tensor, config: Config): Promise<HandResult[]> {
-  /** handtrack caching
-   * 1. if skipFrame returned cached
-   * 2. if any cached results but although not sure if its enough we continute anyhow for 3x skipframes
-   * 3. if not skipframe or eventually rerun detector to generated new cached boxes and reset skipped
-   * 4. generate cached boxes based on detected keypoints
-   */
   if (!models[0] || !models[1] || !models[0]?.inputs[0].shape || !models[1]?.inputs[0].shape) return []; // something is wrong with the model
   outputSize = [input.shape[2] || 0, input.shape[1] || 0];
   skipped++; // increment skip frames
-  if (config.skipFrame && (skipped <= (config.hand.skipFrames || 0)) && ((config.hand.skipTime || 0) <= (now() - lastTime))) {
+  const skipTime = (config.hand.skipTime || 0) > (now() - lastTime);
+  const skipFrame = skipped < (config.hand.skipFrames || 0);
+  if (config.skipAllowed && skipTime && skipFrame) {
     return cache.hands; // return cached results without running anything
   }
   return new Promise(async (resolve) => {
-    if (config.skipFrame && cache.hands.length === config.hand.maxDetected) { // we have all detected hands
+    const skipTimeExtended = 3 * (config.hand.skipTime || 0) > (now() - lastTime);
+    const skipFrameExtended = skipped < 3 * (config.hand.skipFrames || 0);
+    if (config.skipAllowed && cache.hands.length === config.hand.maxDetected) { // we have all detected hands so we're definitely skipping
      cache.hands = await Promise.all(cache.boxes.map((handBox) => detectFingers(input, handBox, config)));
-    } else if (config.skipFrame && skipped < 3 * (config.hand.skipFrames || 0) && ((config.hand.skipTime || 0) <= 3 * (now() - lastTime)) && cache.hands.length > 0) { // we have some cached results: maybe not enough but anyhow continue for bit longer
+    } else if (config.skipAllowed && skipTimeExtended && skipFrameExtended && cache.hands.length > 0) { // we have some cached results: maybe not enough but anyhow continue for bit longer
      cache.hands = await Promise.all(cache.boxes.map((handBox) => detectFingers(input, handBox, config)));
    } else { // finally rerun detector
      cache.boxes = await detectHands(input, config);
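
Taken together, the handtrack predictor now works on three tiers: return the cache untouched while both skip budgets hold, refresh only the finger models from cached boxes when all expected hands are cached (or for up to 3x the budgets while at least some are), and otherwise rerun the box detector. A simplified sketch of that decision flow (types and the `detectHands` / `detectFingers` callbacks are placeholders, not the library internals):

```ts
// sketch of the three-tier handtrack caching strategy
type Box = Record<string, unknown>;
type Hand = Record<string, unknown>;

const cache: { boxes: Box[], hands: Hand[] } = { boxes: [], hands: [] };
let skipped = Number.MAX_SAFE_INTEGER;
let lastTime = 0;

async function predictHands(
  cfg: { skipFrames: number, skipTime: number, maxDetected: number },
  skipAllowed: boolean,
  detectHands: () => Promise<Box[]>,
  detectFingers: (box: Box) => Promise<Hand>,
): Promise<Hand[]> {
  skipped++;
  const elapsed = performance.now() - lastTime;
  if (skipAllowed && cfg.skipTime > elapsed && skipped < cfg.skipFrames) {
    return cache.hands;                                                        // tier 1: both budgets hold, reuse cache as-is
  }
  if (skipAllowed && cache.hands.length === cfg.maxDetected) {
    cache.hands = await Promise.all(cache.boxes.map((b) => detectFingers(b))); // tier 2a: all hands cached, refresh fingers only
  } else if (skipAllowed && 3 * cfg.skipTime > elapsed && skipped < 3 * cfg.skipFrames && cache.hands.length > 0) {
    cache.hands = await Promise.all(cache.boxes.map((b) => detectFingers(b))); // tier 2b: partial cache, keep going a bit longer
  } else {
    cache.boxes = await detectHands();                                         // tier 3: rerun the box detector and rebuild the cache
    cache.hands = await Promise.all(cache.boxes.map((b) => detectFingers(b)));
    lastTime = performance.now();
    skipped = 0;
  }
  return cache.hands;
}
```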

View File

@@ -326,7 +326,7 @@ export class Human {
   const count = Object.values(this.models).filter((model) => model).length;
   if (userConfig) this.config = mergeDeep(this.config, userConfig) as Config;
-  if (env.initial) { // print version info on first run and check for correct backend setup
+  if (this.env.initial) { // print version info on first run and check for correct backend setup
     if (this.config.debug) log(`version: ${this.version}`);
     if (this.config.debug) log(`tfjs version: ${this.tf.version_core}`);
     if (!await backend.check(this)) log('error: backend check failed');
@@ -338,8 +338,8 @@
   }
   await models.load(this); // actually loads models
-  if (env.initial && this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors'); // print memory stats on first run
+  if (this.env.initial && this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors'); // print memory stats on first run
-  env.initial = false;
+  this.env.initial = false;
   const loaded = Object.values(this.models).filter((model) => model).length;
   if (loaded !== count) { // number of loaded models changed
@@ -348,7 +348,7 @@
   }
   const current = Math.trunc(now() - timeStamp);
-  if (current > (this.performance.load as number || 0)) this.performance.load = current;
+  if (current > (this.performance.load as number || 0)) this.performance.load = this.env.perfadd ? (this.performance.load || 0) + current : current;
   }
   // emit event
@@ -393,7 +393,6 @@
   return new Promise(async (resolve) => {
     this.state = 'config';
     let timeStamp;
-    let elapsedTime;
     // update configuration
     this.config = mergeDeep(this.config, userConfig) as Config;
@@ -418,7 +417,7 @@
     this.state = 'image';
     const img = image.process(input, this.config) as { canvas: HTMLCanvasElement | OffscreenCanvas, tensor: Tensor };
     this.process = img;
-    this.performance.image = Math.trunc(now() - timeStamp);
+    this.performance.image = this.env.perfadd ? (this.performance.image || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
     this.analyze('Get Image:');
     if (!img.tensor) {
@@ -429,12 +428,12 @@
     this.emit('image');
     timeStamp = now();
-    this.config.skipFrame = await image.skip(this.config, img.tensor);
+    this.config.skipAllowed = await image.skip(this.config, img.tensor);
     if (!this.performance.frames) this.performance.frames = 0;
     if (!this.performance.cached) this.performance.cached = 0;
     (this.performance.frames as number)++;
-    if (this.config.skipFrame) this.performance.cached++;
+    if (this.config.skipAllowed) this.performance.cached++;
-    this.performance.changed = Math.trunc(now() - timeStamp);
+    this.performance.changed = this.env.perfadd ? (this.performance.changed || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
     this.analyze('Check Changed:');
     // prepare where to store model results
@@ -452,8 +451,7 @@
     } else {
       timeStamp = now();
       faceRes = this.config.face.enabled ? await face.detectFace(this, img.tensor) : [];
-      elapsedTime = Math.trunc(now() - timeStamp);
-      if (elapsedTime > 0) this.performance.face = elapsedTime;
+      this.performance.face = this.env.perfadd ? (this.performance.face || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
     }
     if (this.config.async && (this.config.body.maxDetected === -1 || this.config.hand.maxDetected === -1)) faceRes = await faceRes; // need face result for auto-detect number of hands or bodies
@@ -474,8 +472,7 @@
       else if (this.config.body.modelPath?.includes('blazepose')) bodyRes = this.config.body.enabled ? await blazepose.predict(img.tensor, bodyConfig) : [];
       else if (this.config.body.modelPath?.includes('efficientpose')) bodyRes = this.config.body.enabled ? await efficientpose.predict(img.tensor, bodyConfig) : [];
       else if (this.config.body.modelPath?.includes('movenet')) bodyRes = this.config.body.enabled ? await movenet.predict(img.tensor, bodyConfig) : [];
-      elapsedTime = Math.trunc(now() - timeStamp);
-      if (elapsedTime > 0) this.performance.body = elapsedTime;
+      this.performance.body = this.env.perfadd ? (this.performance.body || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
     }
     this.analyze('End Body:');
@@ -491,8 +488,7 @@
       timeStamp = now();
       if (this.config.hand.detector?.modelPath?.includes('handdetect')) handRes = this.config.hand.enabled ? await handpose.predict(img.tensor, handConfig) : [];
       else if (this.config.hand.detector?.modelPath?.includes('handtrack')) handRes = this.config.hand.enabled ? await handtrack.predict(img.tensor, handConfig) : [];
-      elapsedTime = Math.trunc(now() - timeStamp);
-      if (elapsedTime > 0) this.performance.hand = elapsedTime;
+      this.performance.hand = this.env.perfadd ? (this.performance.hand || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
     }
     this.analyze('End Hand:');
@@ -507,8 +503,7 @@
       timeStamp = now();
       if (this.config.object.modelPath?.includes('nanodet')) objectRes = this.config.object.enabled ? await nanodet.predict(img.tensor, this.config) : [];
       else if (this.config.object.modelPath?.includes('centernet')) objectRes = this.config.object.enabled ? await centernet.predict(img.tensor, this.config) : [];
-      elapsedTime = Math.trunc(now() - timeStamp);
-      if (elapsedTime > 0) this.performance.object = elapsedTime;
+      this.performance.object = this.env.perfadd ? (this.performance.object || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
     }
     this.analyze('End Object:');
@@ -522,7 +517,7 @@
     if (this.config.gesture.enabled) {
       timeStamp = now();
       gestureRes = [...gesture.face(faceRes), ...gesture.body(bodyRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
-      if (!this.config.async) this.performance.gesture = Math.trunc(now() - timeStamp);
+      if (!this.config.async) this.performance.gesture = this.env.perfadd ? (this.performance.gesture || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
       else if (this.performance.gesture) delete this.performance.gesture;
     }
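
`performance.frames` and `performance.cached` count total processed frames and frames where `skipAllowed` let the models reuse their caches, so a rough cache hit rate can be derived from them; a small consumer-side sketch (the counter names come from the hunk above, the helper itself is hypothetical):

```ts
// sketch: estimate how often the change detector allowed cached results to be reused
function cacheHitRate(perf: Record<string, number>): number {
  const frames = perf.frames || 0; // total frames processed
  const cached = perf.cached || 0; // frames where skipAllowed was granted
  return frames > 0 ? cached / frames : 0;
}

// usage sketch: console.log(`cache hit rate: ${Math.round(100 * cacheHitRate(human.performance as Record<string, number>))}%`);
```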
} }

View File

@@ -79,7 +79,9 @@ async function process(res: Tensor | null, outputShape, config: Config) {
 }
 export async function predict(input: Tensor, config: Config): Promise<ObjectResult[]> {
-  if ((skipped < (config.object.skipFrames || 0)) && ((config.object.skipTime || 0) <= (now() - lastTime)) && config.skipFrame && (last.length > 0)) {
+  const skipTime = (config.object.skipTime || 0) > (now() - lastTime);
+  const skipFrame = skipped < (config.object.skipFrames || 0);
+  if (config.skipAllowed && skipTime && skipFrame && (last.length > 0)) {
    skipped++;
    return last;
  }

View File

@@ -107,7 +107,9 @@ async function process(res, inputSize, outputShape, config) {
 }
 export async function predict(image: Tensor, config: Config): Promise<ObjectResult[]> {
-  if ((skipped < (config.object.skipFrames || 0)) && ((config.object.skipTime || 0) <= (now() - lastTime)) && config.skipFrame && (last.length > 0)) {
+  const skipTime = (config.object.skipTime || 0) > (now() - lastTime);
+  const skipFrame = skipped < (config.object.skipFrames || 0);
+  if (config.skipAllowed && skipTime && skipFrame && (last.length > 0)) {
    skipped++;
    return last;
  }

View File

@@ -194,7 +194,7 @@ export interface Result {
   /** {@link ObjectResult}: detection & analysis results */
   object: Array<ObjectResult>
   /** global performance object with timing values for each operation */
-  performance: Record<string, unknown>,
+  performance: Record<string, number>,
   /** optional processed canvas that can be used to draw input on screen */
   canvas?: OffscreenCanvas | HTMLCanvasElement | null | undefined,
   /** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
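
With `performance` now typed as `Record<string, number>`, and with the additive counters enabled, per-module averages are straightforward to compute on the consumer side; a hedged sketch (the helper is hypothetical, only the counter shape comes from the diff):

```ts
// sketch: average per-frame time for each module when counters are additive (perfadd enabled)
function averageTimings(perf: Record<string, number>): Record<string, number> {
  const frames = perf.frames || 0;
  const averages: Record<string, number> = {};
  if (!frames) return averages;
  for (const [name, total] of Object.entries(perf)) {
    if (name === 'frames' || name === 'cached') continue; // counters, not timings
    averages[name] = Math.round(total / frames);          // mean ms per processed frame
  }
  return averages;
}
```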

View File

@@ -4,6 +4,7 @@
 import { TRI468 as triangulation } from '../face/facemeshcoords';
 import { mergeDeep, now } from './util';
+import { env } from './env';
 import type { Result, FaceResult, BodyResult, HandResult, ObjectResult, GestureResult, PersonResult, Point } from '../result';
 /**
@@ -479,7 +480,7 @@ export async function canvas(input: HTMLCanvasElement | OffscreenCanvas | HTMLIm
 export async function all(inCanvas: HTMLCanvasElement | OffscreenCanvas, result: Result, drawOptions?: Partial<DrawOptions>) {
   if (!result || !result.performance || !result || !inCanvas) return null;
-  const timestamp = now();
+  const timeStamp = now();
   const localOptions = mergeDeep(options, drawOptions);
   const promise = Promise.all([
     face(inCanvas, result.face, localOptions),
@@ -489,6 +490,6 @@ export async function all(inCanvas: HTMLCanvasElement | OffscreenCanvas, result:
     gesture(inCanvas, result.gesture, localOptions), // gestures do not have buffering
     // person(inCanvas, result.persons, localOptions); // already included above
   ]);
-  result.performance.draw = Math.trunc(now() - timestamp);
+  result.performance.draw = env.perfadd ? (result.performance.draw as number || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
   return promise;
 }

View File

@@ -25,6 +25,8 @@ export class Env {
   };
   /** Is offscreenCanvas supported? */
   offscreen: undefined | boolean;
+  /** Are performance counter instant values or additive */
+  perfadd: boolean = false;
   /** WASM detected capabilities */
   wasm: {
     supported: undefined | boolean,

View File

@@ -1,26 +1,26 @@
-2021-10-22 20:12:29 INFO: @vladmandic/human version 2.4.0
+2021-10-23 09:35:22 INFO: @vladmandic/human version 2.4.0
-2021-10-22 20:12:29 INFO: User: vlado Platform: linux Arch: x64 Node: v16.10.0
+2021-10-23 09:35:22 INFO: User: vlado Platform: linux Arch: x64 Node: v16.10.0
-2021-10-22 20:12:29 INFO: Application: {"name":"@vladmandic/human","version":"2.4.0"}
+2021-10-23 09:35:22 INFO: Application: {"name":"@vladmandic/human","version":"2.4.0"}
-2021-10-22 20:12:29 INFO: Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
+2021-10-23 09:35:22 INFO: Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
-2021-10-22 20:12:29 INFO: Toolchain: {"build":"0.6.2","esbuild":"0.13.8","typescript":"4.4.4","typedoc":"0.22.6","eslint":"8.0.1"}
+2021-10-23 09:35:22 INFO: Toolchain: {"build":"0.6.2","esbuild":"0.13.8","typescript":"4.4.4","typedoc":"0.22.6","eslint":"8.0.1"}
-2021-10-22 20:12:29 INFO: Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
+2021-10-23 09:35:22 INFO: Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
-2021-10-22 20:12:29 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
+2021-10-23 09:35:22 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
-2021-10-22 20:12:29 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
+2021-10-23 09:35:22 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
-2021-10-22 20:12:29 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":54,"inputBytes":525194,"outputBytes":437512}
+2021-10-23 09:35:22 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":54,"inputBytes":526471,"outputBytes":438971}
-2021-10-22 20:12:29 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
+2021-10-23 09:35:22 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
-2021-10-22 20:12:29 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":54,"inputBytes":525202,"outputBytes":437516}
+2021-10-23 09:35:22 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":54,"inputBytes":526479,"outputBytes":438975}
-2021-10-22 20:12:29 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
+2021-10-23 09:35:22 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
-2021-10-22 20:12:29 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":54,"inputBytes":525269,"outputBytes":437588}
+2021-10-23 09:35:22 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":54,"inputBytes":526546,"outputBytes":439047}
-2021-10-22 20:12:29 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
+2021-10-23 09:35:22 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
-2021-10-22 20:12:29 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2323,"outputBytes":973}
+2021-10-23 09:35:22 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2323,"outputBytes":973}
-2021-10-22 20:12:29 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":54,"inputBytes":524892,"outputBytes":439413}
+2021-10-23 09:35:22 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":54,"inputBytes":526169,"outputBytes":440872}
-2021-10-22 20:12:29 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2572202,"outputBytes":2491625}
+2021-10-23 09:35:23 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2572202,"outputBytes":2491625}
-2021-10-22 20:12:30 STATE: Compile: {"name":"human/browser/esm/custom","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.custom.esm.js","files":54,"inputBytes":3015544,"outputBytes":1606543}
+2021-10-23 09:35:23 STATE: Compile: {"name":"human/browser/esm/custom","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.custom.esm.js","files":54,"inputBytes":3016821,"outputBytes":1607405}
-2021-10-22 20:12:31 STATE: Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":8,"inputBytes":2323,"outputBytes":1155160}
+2021-10-23 09:35:24 STATE: Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":8,"inputBytes":2323,"outputBytes":1155160}
-2021-10-22 20:12:31 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":54,"inputBytes":1679079,"outputBytes":1430229}
+2021-10-23 09:35:24 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":54,"inputBytes":1680356,"outputBytes":1431091}
-2021-10-22 20:12:32 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":54,"inputBytes":1679079,"outputBytes":1878008}
+2021-10-23 09:35:25 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":54,"inputBytes":1680356,"outputBytes":1879461}
-2021-10-22 20:12:50 STATE: Typings: {"input":"src/human.ts","output":"types","files":16}
+2021-10-23 09:35:44 STATE: Typings: {"input":"src/human.ts","output":"types","files":16}
-2021-10-22 20:12:57 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":37,"generated":true}
+2021-10-23 09:35:50 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":37,"generated":true}
-2021-10-22 20:13:38 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":89,"errors":0,"warnings":0}
+2021-10-23 09:36:28 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":89,"errors":0,"warnings":0}
-2021-10-22 20:13:39 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
+2021-10-23 09:36:28 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
-2021-10-22 20:13:39 INFO: Done...
+2021-10-23 09:36:28 INFO: Done...

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,7 +1,7 @@
[typedoc/interfaces/Config.html — regenerated TypeDoc page for interface Config; minified HTML omitted: the documented property skipFrame is replaced by skipAllowed, with its anchor, index entry, property section (source reference config.ts:258, description "Internal Variable"), and navigation link renamed accordingly; the remaining page content is unchanged]

File diff suppressed because one or more lines are too long

@ -13,7 +13,7 @@
<p><a href="HandResult.html">HandResult</a>: detection &amp; analysis results</p> <p><a href="HandResult.html">HandResult</a>: detection &amp; analysis results</p>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="object" class="tsd-anchor"></a><h3>object</h3><div class="tsd-signature tsd-kind-icon">object<span class="tsd-signature-symbol">:</span> <a href="ObjectResult.html" class="tsd-signature-type" data-tsd-kind="Interface">ObjectResult</a><span class="tsd-signature-symbol">[]</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/result.ts#L195">result.ts:195</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead"> </div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="object" class="tsd-anchor"></a><h3>object</h3><div class="tsd-signature tsd-kind-icon">object<span class="tsd-signature-symbol">:</span> <a href="ObjectResult.html" class="tsd-signature-type" data-tsd-kind="Interface">ObjectResult</a><span class="tsd-signature-symbol">[]</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/result.ts#L195">result.ts:195</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p><a href="ObjectResult.html">ObjectResult</a>: detection &amp; analysis results</p> <p><a href="ObjectResult.html">ObjectResult</a>: detection &amp; analysis results</p>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="performance" class="tsd-anchor"></a><h3>performance</h3><div class="tsd-signature tsd-kind-icon">performance<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/result.ts#L197">result.ts:197</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead"> </div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="performance" class="tsd-anchor"></a><h3>performance</h3><div class="tsd-signature tsd-kind-icon">performance<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">&gt;</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/result.ts#L197">result.ts:197</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>global performance object with timing values for each operation</p> <p>global performance object with timing values for each operation</p>
</div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="persons" class="tsd-anchor"></a><h3>persons</h3><div class="tsd-signature tsd-kind-icon">persons<span class="tsd-signature-symbol">:</span> <a href="PersonResult.html" class="tsd-signature-type" data-tsd-kind="Interface">PersonResult</a><span class="tsd-signature-symbol">[]</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/result.ts#L203">result.ts:203</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead"> </div></div></section><section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"><a id="persons" class="tsd-anchor"></a><h3>persons</h3><div class="tsd-signature tsd-kind-icon">persons<span class="tsd-signature-symbol">:</span> <a href="PersonResult.html" class="tsd-signature-type" data-tsd-kind="Interface">PersonResult</a><span class="tsd-signature-symbol">[]</span></div><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/result.ts#L203">result.ts:203</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
<p>getter property that returns unified persons object</p> <p>getter property that returns unified persons object</p>

@ -1 +1 @@
{"version":3,"file":"blazepose.d.ts","sourceRoot":"","sources":["../../../src/body/blazepose.ts"],"names":[],"mappings":"AAAA;;GAEG;AAIH,OAAO,KAAK,EAAgB,UAAU,EAAc,MAAM,WAAW,CAAC;AACtE,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAYxC,wBAAsB,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAWpE;AAED,wBAAsB,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAalE;AAED,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,CAAC,UAAU,GAAG,IAAI,EAAE,UAAU,GAAG,IAAI,CAAC,CAAC,CAI1F;AAoFD,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CAWlF"} {"version":3,"file":"blazepose.d.ts","sourceRoot":"","sources":["../../../src/body/blazepose.ts"],"names":[],"mappings":"AAAA;;GAEG;AAIH,OAAO,KAAK,EAAgB,UAAU,EAAc,MAAM,WAAW,CAAC;AACtE,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAYxC,wBAAsB,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAWpE;AAED,wBAAsB,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAalE;AAED,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,CAAC,UAAU,GAAG,IAAI,EAAE,UAAU,GAAG,IAAI,CAAC,CAAC,CAI1F;AAoFD,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CAalF"}

@ -1 +1 @@
{"version":3,"file":"efficientpose.d.ts","sourceRoot":"","sources":["../../../src/body/efficientpose.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAKH,OAAO,KAAK,EAAE,UAAU,EAAS,MAAM,WAAW,CAAC;AACnD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAaxC,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAQ9D;AAmBD,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CA4ElF"} {"version":3,"file":"efficientpose.d.ts","sourceRoot":"","sources":["../../../src/body/efficientpose.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAKH,OAAO,KAAK,EAAE,UAAU,EAAS,MAAM,WAAW,CAAC;AACnD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAaxC,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAQ9D;AAmBD,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CA8ElF"}

@ -1 +1 @@
{"version":3,"file":"movenet.d.ts","sourceRoot":"","sources":["../../../src/body/movenet.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAOH,OAAO,KAAK,EAAgB,UAAU,EAAc,MAAM,WAAW,CAAC;AACtE,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAmBxC,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAW9D;AAyFD,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CA2DlF"} {"version":3,"file":"movenet.d.ts","sourceRoot":"","sources":["../../../src/body/movenet.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAOH,OAAO,KAAK,EAAgB,UAAU,EAAc,MAAM,WAAW,CAAC;AACtE,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAmBxC,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAW9D;AAyFD,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CA6DlF"}

@ -229,7 +229,7 @@ export interface Config {
*/ */
cacheSensitivity: number; cacheSensitivity: number;
/** Internal Variable */ /** Internal Variable */
skipFrame: boolean; skipAllowed: boolean;
/** Run input through image filters before inference /** Run input through image filters before inference
* - image filters run with near-zero latency as they are executed on the GPU * - image filters run with near-zero latency as they are executed on the GPU
* *
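The hunk above renames the internal `skipFrame` flag to `skipAllowed` while keeping the user-facing `cacheSensitivity` option. A minimal sketch of how that surface is typically used, assuming the package's default `Human` export and the option names shown in the hunk; the sensitivity value is illustrative:

import Human from '@vladmandic/human';

// cacheSensitivity controls how much the input may change before cached results are discarded;
// 0 is assumed to disable caching entirely (illustrative value below)
const human = new Human({ cacheSensitivity: 0.70 });

async function run(video: HTMLVideoElement) {
  const result = await human.detect(video);
  if ('error' in result) return; // detect() may resolve to an { error } object in this version
  // skipAllowed is the renamed internal flag (previously skipFrame): the library sets it when the
  // current input is similar enough to the cached one, so application code only reads it for diagnostics
  console.log('cache used for this frame:', human.config.skipAllowed, 'faces detected:', result.face.length);
}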

@ -1 +1 @@
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/config.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,aAAa;IAC5B,OAAO,EAAE,OAAO,CAAC;IACjB,SAAS,EAAE,MAAM,CAAC;IAClB,UAAU,EAAE,MAAM,CAAC;IACnB,QAAQ,EAAE,MAAM,CAAC;CAClB;AAED,2CAA2C;AAC3C,MAAM,WAAW,kBAAmB,SAAQ,aAAa;IACvD,QAAQ,EAAE,OAAO,CAAC;IAClB,WAAW,EAAE,MAAM,CAAC;IACpB,aAAa,EAAE,MAAM,CAAC;IACtB,YAAY,EAAE,MAAM,CAAC;IACrB,MAAM,EAAE,OAAO,CAAC;CACjB;AAED,sCAAsC;AACtC,oBAAY,cAAc,GAAG,aAAa,CAAA;AAE1C,sCAAsC;AACtC,oBAAY,cAAc,GAAG,aAAa,CAAA;AAE1C;;GAEG;AACH,MAAM,WAAW,qBAAsB,SAAQ,aAAa;IAC1D,aAAa,EAAE,MAAM,CAAC;CACvB;AAED,yCAAyC;AACzC,MAAM,WAAW,iBAAkB,SAAQ,aAAa;IACtD,aAAa,EAAE,MAAM,CAAC;CACvB;AAED,yCAAyC;AACzC,oBAAY,mBAAmB,GAAG,aAAa,CAAA;AAE/C;;;;;;;;;;;EAWE;AACF,MAAM,WAAW,UAAU;IACzB,OAAO,EAAE,OAAO,CAAC;IACjB,QAAQ,EAAE,OAAO,CAAC,kBAAkB,CAAC,CAAC;IACtC,IAAI,EAAE,OAAO,CAAC,cAAc,CAAC,CAAC;IAC9B,IAAI,EAAE,OAAO,CAAC,cAAc,CAAC,CAAC;IAC9B,WAAW,EAAE,OAAO,CAAC,qBAAqB,CAAC,CAAC;IAC5C,OAAO,EAAE,OAAO,CAAC,iBAAiB,CAAC,CAAC;IACpC,SAAS,EAAE,OAAO,CAAC,mBAAmB,CAAC,CAAC;CACzC;AAED;;;;;;;;;;;;;;EAcE;AACF,MAAM,WAAW,UAAW,SAAQ,aAAa;IAC/C,WAAW,EAAE,MAAM,CAAC;IACpB,aAAa,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE;QACT,SAAS,EAAE,MAAM,CAAA;KAClB,CAAC;CACH;AAED;;;;;;;;;;;;;;;EAeE;AACF,MAAM,WAAW,UAAW,SAAQ,aAAa;IAC/C,QAAQ,EAAE,OAAO,CAAC;IAClB,aAAa,EAAE,MAAM,CAAC;IACtB,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,EAAE,MAAM,CAAC;IACpB,SAAS,EAAE,OAAO,CAAC;IACnB,QAAQ,EAAE;QACR,SAAS,CAAC,EAAE,MAAM,CAAC;KACpB,CAAC;IACF,QAAQ,EAAE;QACR,SAAS,CAAC,EAAE,MAAM,CAAC;KACpB,CAAC;CACH;AAED;;;;;;;;;EASE;AACF,MAAM,WAAW,YAAa,SAAQ,aAAa;IACjD,aAAa,EAAE,MAAM,CAAC;IACtB,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,EAAE,MAAM,CAAC;CACrB;AAED;;;;;;;;;;;;;EAaE;AACF,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,OAAO,CAAC;IACjB,SAAS,EAAE,MAAM,CAAC;IAClB,IAAI,EAAE,MAAM,CAAC;CACd;AAED;;;EAGE;AACF,MAAM,WAAW,YAAY;IAC3B,OAAO,EAAE,OAAO,CAAC;IACjB;;;;MAIE;IACF,KAAK,EAAE,MAAM,CAAC;IACd;;;;MAIE;IACF,MAAM,EAAE,MAAM,CAAC;IACf,kDAAkD;IAClD,MAAM,EAAE,OAAO,CAAC;IAChB,iCAAiC;IACjC,IAAI,EAAE,OAAO,CAAC;IACd,wCAAwC;IACxC,UAAU,EAAE,MAAM,CAAC;IACnB,2DAA2D;IAC3D,QAAQ,EAAE,MAAM,CAAC;IACjB,yDAAyD;IACzD,SAAS,EAAE,MAAM,CAAC;IAClB,sDAAsD;IACtD,IAAI,EAAE,MAAM,CAAA;IACZ,+DAA+D;IAC/D,UAAU,EAAE,MAAM,CAAC;IACnB,4DAA4D;IAC5D,GAAG,EAAE,MAAM,CAAC;IACZ,qBAAqB;IACrB,QAAQ,EAAE,OAAO,CAAC;IAClB,yBAAyB;IACzB,KAAK,EAAE,OAAO,CAAC;IACf,2BAA2B;IAC3B,OAAO,EAAE,OAAO,CAAC;IACjB,8BAA8B;IAC9B,UAAU,EAAE,OAAO,CAAC;IACpB,+BAA+B;IAC/B,WAAW,EAAE,OAAO,CAAC;IACrB,mCAAmC;IACnC,QAAQ,EAAE,OAAO,CAAC;IAClB,iEAAiE;IACjE,QAAQ,EAAE,MAAM,CAAC;CAClB;AAED,kCAAkC;AAClC,MAAM,WAAW,aAAa;IAC5B,OAAO,EAAE,OAAO,CAAC;CAClB;AAED;;;;;;;GAOG;AACH,MAAM,WAAW,MAAM;IACrB;;;;;;;;;MASE;IACF,OAAO,EAAE,EAAE,GAAG,KAAK,GAAG,MAAM,GAAG,OAAO,GAAG,SAAS,GAAG,YAAY,GAAG,QAAQ,CAAC;IAG7E;;MAEE;IACF,QAAQ,EAAE,MAAM,CAAC;IAEjB,wCAAwC;IACxC,KAAK,EAAE,OAAO,CAAC;IAEf,uEAAuE;IACvE,KAAK,EAAE,OAAO,CAAC;IAEf;;MAEE;IACF,MAAM,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC;IAG1C;;MAEE;IACF,aAAa,EAAE,MAAM,CAAC;IAEtB;;;MAGE;IACF,gBAAgB,EAAE,MAAM,CAAC;IAEzB,wBAAwB;IACxB,SAAS,EAAE,OAAO,CAAC;IAEnB;;;;MAIE;IACF,MAAM,EAAE,OAAO,CAAC,YAAY,CAAC,CAAC;IAE9B,4BAA4B;IAC5B,OAAO,EAAE,OAAO,CAAC,aAAa,CAAC,CAAC;IAEhC,yBAAyB;IACzB,IAAI,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC;IAE1B,yBAAyB;IACzB,IAAI,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC;IAE1B,yBAAyB;IACzB,IAAI,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC;IAE1B,2BAA2B;IAC3B,MAAM,EAAE,OAAO,CAAC,YAAY,CAAC,CAAC;IAE9B,iCAAiC;IACjC,YAAY,EAAE,OAAO,CAAC,kBAAkB,CAAC,CAAC;CAC3C;AAED;;;GAGG;AACH,QAAA,MAAM,MAAM,EAAE,MAmLb,CAAC;AAEF,OAAO,EAAE,MAAM,IAAI,QAAQ,EAAE,CAAC"} 
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/config.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,aAAa;IAC5B,OAAO,EAAE,OAAO,CAAC;IACjB,SAAS,EAAE,MAAM,CAAC;IAClB,UAAU,EAAE,MAAM,CAAC;IACnB,QAAQ,EAAE,MAAM,CAAC;CAClB;AAED,2CAA2C;AAC3C,MAAM,WAAW,kBAAmB,SAAQ,aAAa;IACvD,QAAQ,EAAE,OAAO,CAAC;IAClB,WAAW,EAAE,MAAM,CAAC;IACpB,aAAa,EAAE,MAAM,CAAC;IACtB,YAAY,EAAE,MAAM,CAAC;IACrB,MAAM,EAAE,OAAO,CAAC;CACjB;AAED,sCAAsC;AACtC,oBAAY,cAAc,GAAG,aAAa,CAAA;AAE1C,sCAAsC;AACtC,oBAAY,cAAc,GAAG,aAAa,CAAA;AAE1C;;GAEG;AACH,MAAM,WAAW,qBAAsB,SAAQ,aAAa;IAC1D,aAAa,EAAE,MAAM,CAAC;CACvB;AAED,yCAAyC;AACzC,MAAM,WAAW,iBAAkB,SAAQ,aAAa;IACtD,aAAa,EAAE,MAAM,CAAC;CACvB;AAED,yCAAyC;AACzC,oBAAY,mBAAmB,GAAG,aAAa,CAAA;AAE/C;;;;;;;;;;;EAWE;AACF,MAAM,WAAW,UAAU;IACzB,OAAO,EAAE,OAAO,CAAC;IACjB,QAAQ,EAAE,OAAO,CAAC,kBAAkB,CAAC,CAAC;IACtC,IAAI,EAAE,OAAO,CAAC,cAAc,CAAC,CAAC;IAC9B,IAAI,EAAE,OAAO,CAAC,cAAc,CAAC,CAAC;IAC9B,WAAW,EAAE,OAAO,CAAC,qBAAqB,CAAC,CAAC;IAC5C,OAAO,EAAE,OAAO,CAAC,iBAAiB,CAAC,CAAC;IACpC,SAAS,EAAE,OAAO,CAAC,mBAAmB,CAAC,CAAC;CACzC;AAED;;;;;;;;;;;;;;EAcE;AACF,MAAM,WAAW,UAAW,SAAQ,aAAa;IAC/C,WAAW,EAAE,MAAM,CAAC;IACpB,aAAa,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE;QACT,SAAS,EAAE,MAAM,CAAA;KAClB,CAAC;CACH;AAED;;;;;;;;;;;;;;;EAeE;AACF,MAAM,WAAW,UAAW,SAAQ,aAAa;IAC/C,QAAQ,EAAE,OAAO,CAAC;IAClB,aAAa,EAAE,MAAM,CAAC;IACtB,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,EAAE,MAAM,CAAC;IACpB,SAAS,EAAE,OAAO,CAAC;IACnB,QAAQ,EAAE;QACR,SAAS,CAAC,EAAE,MAAM,CAAC;KACpB,CAAC;IACF,QAAQ,EAAE;QACR,SAAS,CAAC,EAAE,MAAM,CAAC;KACpB,CAAC;CACH;AAED;;;;;;;;;EASE;AACF,MAAM,WAAW,YAAa,SAAQ,aAAa;IACjD,aAAa,EAAE,MAAM,CAAC;IACtB,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,EAAE,MAAM,CAAC;CACrB;AAED;;;;;;;;;;;;;EAaE;AACF,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,OAAO,CAAC;IACjB,SAAS,EAAE,MAAM,CAAC;IAClB,IAAI,EAAE,MAAM,CAAC;CACd;AAED;;;EAGE;AACF,MAAM,WAAW,YAAY;IAC3B,OAAO,EAAE,OAAO,CAAC;IACjB;;;;MAIE;IACF,KAAK,EAAE,MAAM,CAAC;IACd;;;;MAIE;IACF,MAAM,EAAE,MAAM,CAAC;IACf,kDAAkD;IAClD,MAAM,EAAE,OAAO,CAAC;IAChB,iCAAiC;IACjC,IAAI,EAAE,OAAO,CAAC;IACd,wCAAwC;IACxC,UAAU,EAAE,MAAM,CAAC;IACnB,2DAA2D;IAC3D,QAAQ,EAAE,MAAM,CAAC;IACjB,yDAAyD;IACzD,SAAS,EAAE,MAAM,CAAC;IAClB,sDAAsD;IACtD,IAAI,EAAE,MAAM,CAAA;IACZ,+DAA+D;IAC/D,UAAU,EAAE,MAAM,CAAC;IACnB,4DAA4D;IAC5D,GAAG,EAAE,MAAM,CAAC;IACZ,qBAAqB;IACrB,QAAQ,EAAE,OAAO,CAAC;IAClB,yBAAyB;IACzB,KAAK,EAAE,OAAO,CAAC;IACf,2BAA2B;IAC3B,OAAO,EAAE,OAAO,CAAC;IACjB,8BAA8B;IAC9B,UAAU,EAAE,OAAO,CAAC;IACpB,+BAA+B;IAC/B,WAAW,EAAE,OAAO,CAAC;IACrB,mCAAmC;IACnC,QAAQ,EAAE,OAAO,CAAC;IAClB,iEAAiE;IACjE,QAAQ,EAAE,MAAM,CAAC;CAClB;AAED,kCAAkC;AAClC,MAAM,WAAW,aAAa;IAC5B,OAAO,EAAE,OAAO,CAAC;CAClB;AAED;;;;;;;GAOG;AACH,MAAM,WAAW,MAAM;IACrB;;;;;;;;;MASE;IACF,OAAO,EAAE,EAAE,GAAG,KAAK,GAAG,MAAM,GAAG,OAAO,GAAG,SAAS,GAAG,YAAY,GAAG,QAAQ,CAAC;IAG7E;;MAEE;IACF,QAAQ,EAAE,MAAM,CAAC;IAEjB,wCAAwC;IACxC,KAAK,EAAE,OAAO,CAAC;IAEf,uEAAuE;IACvE,KAAK,EAAE,OAAO,CAAC;IAEf;;MAEE;IACF,MAAM,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC;IAG1C;;MAEE;IACF,aAAa,EAAE,MAAM,CAAC;IAEtB;;;MAGE;IACF,gBAAgB,EAAE,MAAM,CAAC;IAEzB,wBAAwB;IACxB,WAAW,EAAE,OAAO,CAAC;IAErB;;;;MAIE;IACF,MAAM,EAAE,OAAO,CAAC,YAAY,CAAC,CAAC;IAE9B,4BAA4B;IAC5B,OAAO,EAAE,OAAO,CAAC,aAAa,CAAC,CAAC;IAEhC,yBAAyB;IACzB,IAAI,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC;IAE1B,yBAAyB;IACzB,IAAI,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC;IAE1B,yBAAyB;IACzB,IAAI,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC;IAE1B,2BAA2B;IAC3B,MAAM,EAAE,OAAO,CAAC,YAAY,CAAC,CAAC;IAE9B,iCAAiC;IACjC,YAAY,EAAE,OAAO,CAAC,kBAAkB,CAAC,CAAC;CAC3C;AAED;;;GAGG;AACH,QAAA,MAAM,MAAM,EAAE,MAmLb,CAAC;AAEF,OAAO,EAAE,MAAM,IAAI,QAAQ,EAAE,CAAC"}

@ -1 +1 @@
{"version":3,"file":"antispoof.d.ts","sourceRoot":"","sources":["../../../src/face/antispoof.ts"],"names":[],"mappings":"AAAA;;GAEG;AAGH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AACxC,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAUxD,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAQ9D;AAED,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,KAAA,EAAE,KAAK,KAAA,oBAiBtE"} {"version":3,"file":"antispoof.d.ts","sourceRoot":"","sources":["../../../src/face/antispoof.ts"],"names":[],"mappings":"AAAA;;GAEG;AAGH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AACxC,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAUxD,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAQ9D;AAED,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,KAAA,EAAE,KAAK,KAAA,oBAmBtE"}

@ -1 +1 @@
{"version":3,"file":"face.d.ts","sourceRoot":"","sources":["../../../src/face/face.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAQH,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,WAAW,CAAC;AAC5C,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAG5C,eAAO,MAAM,UAAU,uBAAiD,MAAM,KAAG,QAAQ,UAAU,EAAE,CAmIpG,CAAC"} {"version":3,"file":"face.d.ts","sourceRoot":"","sources":["../../../src/face/face.ts"],"names":[],"mappings":"AAAA;;;GAGG;AASH,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,WAAW,CAAC;AAC5C,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAG5C,eAAO,MAAM,UAAU,uBAAiD,MAAM,KAAG,QAAQ,UAAU,EAAE,CAmIpG,CAAC"}

@ -1 +1 @@
{"version":3,"file":"facemesh.d.ts","sourceRoot":"","sources":["../../../src/face/facemesh.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAQH,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,UAAU,EAAS,MAAM,WAAW,CAAC;AACnD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAWxC,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CAiGlF;AAED,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAU9D;AAED,eAAO,MAAM,aAAa,UAAgB,CAAC;AAC3C,eAAO,MAAM,KAAK,oBAAe,CAAC"} {"version":3,"file":"facemesh.d.ts","sourceRoot":"","sources":["../../../src/face/facemesh.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAQH,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,UAAU,EAAS,MAAM,WAAW,CAAC;AACnD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAWxC,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CAoGlF;AAED,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAU9D;AAED,eAAO,MAAM,aAAa,UAAgB,CAAC;AAC3C,eAAO,MAAM,KAAK,oBAAe,CAAC"}

@ -1 +1 @@
{"version":3,"file":"faceres.d.ts","sourceRoot":"","sources":["../../../src/face/faceres.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAIH,OAAO,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAexC,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAS9D;AAED,wBAAgB,OAAO,CAAC,KAAK,KAAA,GAAG,MAAM,CAmDrC;AAED,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,KAAA,EAAE,KAAK,KAAA,oBA8CtE"} {"version":3,"file":"faceres.d.ts","sourceRoot":"","sources":["../../../src/face/faceres.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAIH,OAAO,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAexC,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAS9D;AAED,wBAAgB,OAAO,CAAC,KAAK,KAAA,GAAG,MAAM,CAmDrC;AAED,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,KAAA,EAAE,KAAK,KAAA,oBAgDtE"}

@ -1 +1 @@
{"version":3,"file":"emotion.d.ts","sourceRoot":"","sources":["../../../src/gear/emotion.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAGH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AACxC,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAexD,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAQ9D;AAED,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,KAAA,EAAE,KAAK,KAAA,oBAwCtE"} {"version":3,"file":"emotion.d.ts","sourceRoot":"","sources":["../../../src/gear/emotion.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAGH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AACxC,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAexD,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAQ9D;AAED,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,KAAA,EAAE,KAAK,KAAA,oBA0CtE"}

@ -1 +1 @@
{"version":3,"file":"gear-agegenderrace.d.ts","sourceRoot":"","sources":["../../../src/gear/gear-agegenderrace.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAIH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AACxC,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAUxD,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,GAAG,uBAQ9C;AAGD,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,oBA2C1D"} {"version":3,"file":"gear-agegenderrace.d.ts","sourceRoot":"","sources":["../../../src/gear/gear-agegenderrace.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAIH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AACxC,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAUxD,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,GAAG,uBAQ9C;AAGD,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,oBA+C1D"}

@ -1 +1 @@
{"version":3,"file":"handtrack.d.ts","sourceRoot":"","sources":["../../../src/hand/handtrack.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAKH,OAAO,KAAK,EAAE,UAAU,EAAc,MAAM,WAAW,CAAC;AACxD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AA+CxC,wBAAsB,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAepE;AAED,wBAAsB,YAAY,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAWtE;AAED,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,CAAC,UAAU,GAAG,IAAI,EAAE,UAAU,GAAG,IAAI,CAAC,CAAC,CAI1F;AA0FD,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CA8ClF"} {"version":3,"file":"handtrack.d.ts","sourceRoot":"","sources":["../../../src/hand/handtrack.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAKH,OAAO,KAAK,EAAE,UAAU,EAAc,MAAM,WAAW,CAAC;AACxD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AA+CxC,wBAAsB,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAepE;AAED,wBAAsB,YAAY,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAWtE;AAED,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,CAAC,UAAU,GAAG,IAAI,EAAE,UAAU,GAAG,IAAI,CAAC,CAAC,CAI1F;AA0FD,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CA2ClF"}

@ -1 +1 @@
{"version":3,"file":"human.d.ts","sourceRoot":"","sources":["../../src/human.ts"],"names":[],"mappings":"AAAA;;GAEG;AAKH,OAAO,EAAO,GAAG,EAAE,MAAM,YAAY,CAAC;AACtC,OAAO,KAAK,EAAE,MAAM,qBAAqB,CAAC;AAK1C,OAAO,KAAK,IAAI,MAAM,aAAa,CAAC;AAGpC,OAAO,KAAK,QAAQ,MAAM,iBAAiB,CAAC;AAM5C,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AAEvC,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,KAAK,MAAM,MAAM,UAAU,CAAC;AASnC,OAAO,KAAK,EAAE,MAAM,EAAiF,MAAM,UAAU,CAAC;AACtH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAC3C,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC/C,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAC3C,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAEvC,kEAAkE;AAClE,cAAc,UAAU,CAAC;AAEzB,6DAA6D;AAC7D,cAAc,UAAU,CAAC;AAEzB,yDAAyD;AACzD,YAAY,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC/C,OAAO,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,YAAY,CAAC;AAEtC,2CAA2C;AAC3C,YAAY,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE/C,+BAA+B;AAC/B,OAAO,EAAE,GAAG,EAAE,KAAK,EAAE,MAAM,UAAU,CAAC;AAEtC,4DAA4D;AAC5D,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAElC,+DAA+D;AAC/D,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAEtC;;;;;;;GAOG;AACH,oBAAY,MAAM,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,GAAG,QAAQ,GAAG,QAAQ,GAAG,OAAO,CAAC;AAEjF;;GAEG;AACH,oBAAY,KAAK,GAAG;IAAE,KAAK,EAAE,MAAM,CAAA;CAAE,CAAC;AAEtC;;GAEG;AACH,oBAAY,UAAU,GAAG,OAAO,EAAE,CAAC;AAEnC;;;;;;;;;;GAUG;AACH,qBAAa,KAAK;;IAChB,0DAA0D;IAC1D,OAAO,EAAE,MAAM,CAAC;IAEhB;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;;MAGE;IACF,MAAM,EAAE,MAAM,CAAC;IAEf;;;OAGG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd,iDAAiD;IACjD,OAAO,EAAE;QAAE,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;QAAC,MAAM,EAAE,eAAe,GAAG,iBAAiB,GAAG,IAAI,CAAA;KAAE,CAAC;IAEvF;;;;;OAKG;IACH,EAAE,EAAE,UAAU,CAAC;IAEf,qEAAqE;IACrE,GAAG,EAAE,GAAG,CAAC;IAET;;;;;;;OAOG;IACH,IAAI,EAAE;QAAE,MAAM,EAAE,OAAO,IAAI,CAAC,MAAM,CAAC;QAAC,IAAI,EAAE,OAAO,IAAI,CAAC,IAAI,CAAC;QAAC,IAAI,EAAE,OAAO,IAAI,CAAC,IAAI,CAAC;QAAC,IAAI,EAAE,OAAO,IAAI,CAAC,IAAI,CAAC;QAAC,OAAO,EAAE,OAAO,IAAI,CAAC,OAAO,CAAC;QAAC,MAAM,EAAE,OAAO,IAAI,CAAC,MAAM,CAAC;QAAC,MAAM,EAAE,OAAO,IAAI,CAAC,MAAM,CAAC;QAAC,GAAG,EAAE,OAAO,IAAI,CAAC,GAAG,CAAC;QAAC,OAAO,EAAE,WAAW,CAAA;KAAE,CAAC;IAE/O;;;MAGE;IACF,MAAM,EAAE,MAAM,CAAC,MAAM,CAAC;IAEtB;;;;;;;;;OASG;IACH,MAAM,EAAE,WAAW,GAAG,SAAS,CAAC;IAChC,oGAAoG;IACpG,iBAAiB,EAAE,OAAO,QAAQ,CAAC,aAAa,CAAC;IACjD,0EAA0E;IAC1E,SAAS,EAAE,OAAO,QAAQ,CAAC,KAAK,CAAC;IACjC,oFAAoF;IACpF,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAIpC,uBAAuB;IACvB,EAAE,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAG5B;;;;;OAKG;gBACS,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC;IA+CxC,cAAc;IACd,OAAO,WAAY,MAAM,EAAE,UAOzB;IAgBF,4CAA4C;IAC5C,KAAK,IAAI,IAAI;IAMb,4CAA4C;IACrC,QAAQ,CAAC,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC;;;;;IAI5C,oCAAoC;IAC7B,UAAU,0BAAoB;IAC9B,QAAQ,wBAAkB;IAC1B,KAAK,qBAAe;IAE3B,4CAA4C;IAC5C,GAAG,IAAI,MAAM;IAIb;;;;OAIG;IACH,KAAK,CAAC,KAAK,EAAE,KAAK,EAAE,SAAS,GAAE,OAAc;;;;IAI7C;;;;;;;;;;;;OAYG;IACG,YAAY,CAAC,KAAK,EAAE,KAAK,EAAE,UAAU,CAAC,EAAE,KAAK,GAAG,OAAO,CAAC;QAAE,IAAI,EAAE,MAAM,EAAE,CAAC;QAAC,MAAM,EAAE,iBAAiB,GAAG,eAAe,GAAG,IAAI,CAAC;QAAC,KAAK,EAAE,iBAAiB,GAAG,eAAe,GAAG,IAAI,CAAA;KAAE,CAAC;IAIxL;;;;OAIG;IAEH,OAAO,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAIrC;;;;;;OAMG;IACG,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAK3B;;;;;MAKE;IACI,IAAI,CAAC,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAgCvD,cAAc;IACd,IAAI,UAAW,MAAM,UAEnB;IAEF;;;;;OAKG;IACH,IAAI,CAAC,MAAM,GAAE,MAAoB,GAAG,MAAM;IAI1C;;;;;MAKE;IACI,MAAM,CAAC,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG;QAAE,KAAK,MAAA;KAAE,CAAC;IAIvE;;;;;;;;;MASE;IACI,MAAM,CAAC,KAAK,EAAE,KAAK,EAAE,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;CAkKlF;AAED,oCAAoC;AACpC,OAAO,EAAE,KAAK,IAAI,OAAO,EAAE,CAAC"} 
{"version":3,"file":"human.d.ts","sourceRoot":"","sources":["../../src/human.ts"],"names":[],"mappings":"AAAA;;GAEG;AAKH,OAAO,EAAO,GAAG,EAAE,MAAM,YAAY,CAAC;AACtC,OAAO,KAAK,EAAE,MAAM,qBAAqB,CAAC;AAK1C,OAAO,KAAK,IAAI,MAAM,aAAa,CAAC;AAGpC,OAAO,KAAK,QAAQ,MAAM,iBAAiB,CAAC;AAM5C,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AAEvC,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,KAAK,MAAM,MAAM,UAAU,CAAC;AASnC,OAAO,KAAK,EAAE,MAAM,EAAiF,MAAM,UAAU,CAAC;AACtH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAC3C,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC/C,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAC3C,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAEvC,kEAAkE;AAClE,cAAc,UAAU,CAAC;AAEzB,6DAA6D;AAC7D,cAAc,UAAU,CAAC;AAEzB,yDAAyD;AACzD,YAAY,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC/C,OAAO,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,YAAY,CAAC;AAEtC,2CAA2C;AAC3C,YAAY,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE/C,+BAA+B;AAC/B,OAAO,EAAE,GAAG,EAAE,KAAK,EAAE,MAAM,UAAU,CAAC;AAEtC,4DAA4D;AAC5D,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAElC,+DAA+D;AAC/D,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAEtC;;;;;;;GAOG;AACH,oBAAY,MAAM,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,GAAG,QAAQ,GAAG,QAAQ,GAAG,OAAO,CAAC;AAEjF;;GAEG;AACH,oBAAY,KAAK,GAAG;IAAE,KAAK,EAAE,MAAM,CAAA;CAAE,CAAC;AAEtC;;GAEG;AACH,oBAAY,UAAU,GAAG,OAAO,EAAE,CAAC;AAEnC;;;;;;;;;;GAUG;AACH,qBAAa,KAAK;;IAChB,0DAA0D;IAC1D,OAAO,EAAE,MAAM,CAAC;IAEhB;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;;MAGE;IACF,MAAM,EAAE,MAAM,CAAC;IAEf;;;OAGG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd,iDAAiD;IACjD,OAAO,EAAE;QAAE,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;QAAC,MAAM,EAAE,eAAe,GAAG,iBAAiB,GAAG,IAAI,CAAA;KAAE,CAAC;IAEvF;;;;;OAKG;IACH,EAAE,EAAE,UAAU,CAAC;IAEf,qEAAqE;IACrE,GAAG,EAAE,GAAG,CAAC;IAET;;;;;;;OAOG;IACH,IAAI,EAAE;QAAE,MAAM,EAAE,OAAO,IAAI,CAAC,MAAM,CAAC;QAAC,IAAI,EAAE,OAAO,IAAI,CAAC,IAAI,CAAC;QAAC,IAAI,EAAE,OAAO,IAAI,CAAC,IAAI,CAAC;QAAC,IAAI,EAAE,OAAO,IAAI,CAAC,IAAI,CAAC;QAAC,OAAO,EAAE,OAAO,IAAI,CAAC,OAAO,CAAC;QAAC,MAAM,EAAE,OAAO,IAAI,CAAC,MAAM,CAAC;QAAC,MAAM,EAAE,OAAO,IAAI,CAAC,MAAM,CAAC;QAAC,GAAG,EAAE,OAAO,IAAI,CAAC,GAAG,CAAC;QAAC,OAAO,EAAE,WAAW,CAAA;KAAE,CAAC;IAE/O;;;MAGE;IACF,MAAM,EAAE,MAAM,CAAC,MAAM,CAAC;IAEtB;;;;;;;;;OASG;IACH,MAAM,EAAE,WAAW,GAAG,SAAS,CAAC;IAChC,oGAAoG;IACpG,iBAAiB,EAAE,OAAO,QAAQ,CAAC,aAAa,CAAC;IACjD,0EAA0E;IAC1E,SAAS,EAAE,OAAO,QAAQ,CAAC,KAAK,CAAC;IACjC,oFAAoF;IACpF,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAIpC,uBAAuB;IACvB,EAAE,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAG5B;;;;;OAKG;gBACS,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC;IA+CxC,cAAc;IACd,OAAO,WAAY,MAAM,EAAE,UAOzB;IAgBF,4CAA4C;IAC5C,KAAK,IAAI,IAAI;IAMb,4CAA4C;IACrC,QAAQ,CAAC,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC;;;;;IAI5C,oCAAoC;IAC7B,UAAU,0BAAoB;IAC9B,QAAQ,wBAAkB;IAC1B,KAAK,qBAAe;IAE3B,4CAA4C;IAC5C,GAAG,IAAI,MAAM;IAIb;;;;OAIG;IACH,KAAK,CAAC,KAAK,EAAE,KAAK,EAAE,SAAS,GAAE,OAAc;;;;IAI7C;;;;;;;;;;;;OAYG;IACG,YAAY,CAAC,KAAK,EAAE,KAAK,EAAE,UAAU,CAAC,EAAE,KAAK,GAAG,OAAO,CAAC;QAAE,IAAI,EAAE,MAAM,EAAE,CAAC;QAAC,MAAM,EAAE,iBAAiB,GAAG,eAAe,GAAG,IAAI,CAAC;QAAC,KAAK,EAAE,iBAAiB,GAAG,eAAe,GAAG,IAAI,CAAA;KAAE,CAAC;IAIxL;;;;OAIG;IAEH,OAAO,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAIrC;;;;;;OAMG;IACG,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAK3B;;;;;MAKE;IACI,IAAI,CAAC,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAgCvD,cAAc;IACd,IAAI,UAAW,MAAM,UAEnB;IAEF;;;;;OAKG;IACH,IAAI,CAAC,MAAM,GAAE,MAAoB,GAAG,MAAM;IAI1C;;;;;MAKE;IACI,MAAM,CAAC,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG;QAAE,KAAK,MAAA;KAAE,CAAC;IAIvE;;;;;;;;;MASE;IACI,MAAM,CAAC,KAAK,EAAE,KAAK,EAAE,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;CA6JlF;AAED,oCAAoC;AACpC,OAAO,EAAE,KAAK,IAAI,OAAO,EAAE,CAAC"}

@ -1 +1 @@
{"version":3,"file":"centernet.d.ts","sourceRoot":"","sources":["../../../src/object/centernet.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAKH,OAAO,KAAK,EAAE,YAAY,EAAO,MAAM,WAAW,CAAC;AACnD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAUxC,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAW9D;AAgDD,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,YAAY,EAAE,CAAC,CAmBpF"} {"version":3,"file":"centernet.d.ts","sourceRoot":"","sources":["../../../src/object/centernet.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAKH,OAAO,KAAK,EAAE,YAAY,EAAO,MAAM,WAAW,CAAC;AACnD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAUxC,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAW9D;AAgDD,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,YAAY,EAAE,CAAC,CAqBpF"}

@ -1 +1 @@
{"version":3,"file":"nanodet.d.ts","sourceRoot":"","sources":["../../../src/object/nanodet.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAKH,OAAO,KAAK,EAAE,YAAY,EAAO,MAAM,WAAW,CAAC;AACnD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAUxC,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAU9D;AA6ED,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,YAAY,EAAE,CAAC,CAwBpF"} {"version":3,"file":"nanodet.d.ts","sourceRoot":"","sources":["../../../src/object/nanodet.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAKH,OAAO,KAAK,EAAE,YAAY,EAAO,MAAM,WAAW,CAAC;AACnD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAUxC,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAU9D;AA6ED,wBAAsB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,YAAY,EAAE,CAAC,CA0BpF"}

@ -202,7 +202,7 @@ export interface Result {
/** {@link ObjectResult}: detection & analysis results */ /** {@link ObjectResult}: detection & analysis results */
object: Array<ObjectResult>; object: Array<ObjectResult>;
/** global performance object with timing values for each operation */ /** global performance object with timing values for each operation */
performance: Record<string, unknown>; performance: Record<string, number>;
/** optional processed canvas that can be used to draw input on screen */ /** optional processed canvas that can be used to draw input on screen */
canvas?: OffscreenCanvas | HTMLCanvasElement | null | undefined; canvas?: OffscreenCanvas | HTMLCanvasElement | null | undefined;
/** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */ /** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
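With `performance` narrowed from `Record<string, unknown>` to `Record<string, number>`, timing values can be consumed without casts. A small sketch under that assumption, again using the package's default `Human` export; the input element and logging are illustrative:

import Human from '@vladmandic/human';

const human = new Human();

async function logTimings(input: HTMLVideoElement) {
  const result = await human.detect(input);
  if ('error' in result) return; // detect() may resolve to an { error } object in this version
  // each entry is the timing value for one operation, now typed as a plain number
  for (const [op, ms] of Object.entries(result.performance)) console.log(`${op}: ${ms}`);
  const total = Object.values(result.performance).reduce((sum, ms) => sum + ms, 0);
  console.log('sum of reported timings:', Math.round(total));
}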

@ -1 +1 @@
{"version":3,"file":"result.d.ts","sourceRoot":"","sources":["../../src/result.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAC3C,OAAO,KAAK,EAAE,WAAW,EAAE,WAAW,EAAE,WAAW,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAE5F,oBAAY,GAAG,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;AACnD,oBAAY,KAAK,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC;AAE9C;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2BG;AACH,MAAM,WAAW,UAAU;IACzB,EAAE,EAAE,MAAM,CAAA;IACV,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,EAAE,MAAM,CAAC;IAClB,GAAG,EAAE,GAAG,CAAC;IACT,MAAM,EAAE,GAAG,CAAC;IACZ,IAAI,EAAE,KAAK,CAAC,KAAK,CAAC,CAAA;IAClB,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAA;IACrB,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,KAAK,EAAE,CAAC,CAAC;IACrC,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,OAAO,CAAC,EAAE,KAAK,CAAC;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IACpD,SAAS,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;IAC1B,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE;QACT,KAAK,EAAE;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,GAAG,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,CAAA;SAAE,CAAC;QACpD,MAAM,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;QACjF,IAAI,EAAE;YAAE,OAAO,EAAE,MAAM,CAAC;YAAC,QAAQ,EAAE,MAAM,CAAA;SAAE,CAAC;KAC7C,CAAA;IACD,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,oBAAY,YAAY,GAAG;IACzB,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,KAAK,CAAC;IAChB,WAAW,EAAE,KAAK,CAAC;IACnB,KAAK,EAAE,MAAM,CAAC;CACf,CAAA;AAED;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,UAAU;IACzB,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,GAAG,EAAE,GAAG,CAAC;IACT,MAAM,EAAE,GAAG,CAAC;IACZ,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;IAC5C,SAAS,EAAE,KAAK,CAAC,YAAY,CAAC,CAAA;CAC/B;AAED;;;;;;;;;;GAUG;AACH,MAAM,WAAW,UAAU;IACzB,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,MAAM,CAAC;IACjB,WAAW,EAAE,MAAM,CAAC;IACpB,GAAG,EAAE,GAAG,CAAC;IACT,MAAM,EAAE,GAAG,CAAC;IACZ,SAAS,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;IACxB,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,MAAM,CACjB,OAAO,GAAG,QAAQ,GAAG,OAAO,GAAG,MAAM,GAAG,OAAO,GAAG,MAAM,EACxD,KAAK,CAAC,KAAK,CAAC,CACb,CAAC;IACF,SAAS,EAAE,MAAM,CACf,OAAO,GAAG,QAAQ,GAAG,OAAO,GAAG,MAAM,GAAG,OAAO,EAC/C;QAAE,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC;QAAC,SAAS,EAAE,YAAY,GAAG,cAAc,GAAG,gBAAgB,GAAG,iBAAiB,GAAG,iBAAiB,GAAG,gBAAgB,GAAG,mBAAmB,GAAG,kBAAkB,CAAA;KAAE,CACtM,CAAC;CACH;AAED;;;;;;;;;;;EAWE;AACF,MAAM,WAAW,YAAY;IAC3B,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,GAAG,EAAE,GAAG,CAAC;IACT,MAAM,EAAE,GAAG,CAAC;CACb;AAED;;;;;;;GAOG;AACH,oBAAY,aAAa,GACvB;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,WAAW,CAAA;CAAE,GACtC;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,WAAW,CAAA;CAAE,GACxC;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,WAAW,CAAA;CAAE,GACxC;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,WAAW,CAAA;CAAE,CAAA;AAE5C;;;;;;;;;;;EAWE;AACF,MAAM,WAAW,YAAY;IAC3B,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,UAAU,CAAC;IACjB,IAAI,EAAE,UAAU,GAAG,IAAI,CAAC;IACxB,KAAK,EAAE;QAAE,IAAI,EAAE,UAAU,GAAG,IAAI,CAAC;QAAC,KAAK,EAAE,UAAU,GAAG,IAAI,CAAA;KAAE,CAAC;IAC7D,QAAQ,EAAE,KAAK,CAAC,aAAa,CAAC,CAAC;IAC/B,GAAG,EAAE,GAAG,CAAC;IACT,MAAM,CAAC,EAAE,GAAG,CAAC;CACd;AAED;;;;GAIG;AACH,MAAM,WAAW,MAAM;IACrB,uDAAuD;IACvD,IAAI,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;IACxB,uDAAuD;IACvD,IAAI,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;IACxB,uDAAuD;IACvD,IAAI,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;IACxB,0DAA0D;IAC1D,OAAO,EAAE,KAAK,CAAC,aAAa,CAAC,CAAC;IAC9B,yDAAyD;IACzD,MAAM,EAAE,KAAK,CAAC,YAAY,CAAC,CAAA;IAC3B,sEAAsE;IACtE,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,O
AAO,CAAC,CAAC;IACrC,yEAAyE;IACzE,MAAM,CAAC,EAAE,eAAe,GAAG,iBAAiB,GAAG,IAAI,GAAG,SAAS,CAAC;IAChE,wFAAwF;IACxF,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;IAC3B,2DAA2D;IAC3D,OAAO,EAAE,KAAK,CAAC,YAAY,CAAC,CAAC;CAC9B"} {"version":3,"file":"result.d.ts","sourceRoot":"","sources":["../../src/result.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAC3C,OAAO,KAAK,EAAE,WAAW,EAAE,WAAW,EAAE,WAAW,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAE5F,oBAAY,GAAG,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;AACnD,oBAAY,KAAK,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC;AAE9C;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2BG;AACH,MAAM,WAAW,UAAU;IACzB,EAAE,EAAE,MAAM,CAAA;IACV,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,EAAE,MAAM,CAAC;IAClB,GAAG,EAAE,GAAG,CAAC;IACT,MAAM,EAAE,GAAG,CAAC;IACZ,IAAI,EAAE,KAAK,CAAC,KAAK,CAAC,CAAA;IAClB,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAA;IACrB,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,KAAK,EAAE,CAAC,CAAC;IACrC,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,OAAO,CAAC,EAAE,KAAK,CAAC;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IACpD,SAAS,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;IAC1B,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE;QACT,KAAK,EAAE;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,GAAG,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,CAAA;SAAE,CAAC;QACpD,MAAM,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;QACjF,IAAI,EAAE;YAAE,OAAO,EAAE,MAAM,CAAC;YAAC,QAAQ,EAAE,MAAM,CAAA;SAAE,CAAC;KAC7C,CAAA;IACD,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,oBAAY,YAAY,GAAG;IACzB,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,KAAK,CAAC;IAChB,WAAW,EAAE,KAAK,CAAC;IACnB,KAAK,EAAE,MAAM,CAAC;CACf,CAAA;AAED;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,UAAU;IACzB,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,GAAG,EAAE,GAAG,CAAC;IACT,MAAM,EAAE,GAAG,CAAC;IACZ,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;IAC5C,SAAS,EAAE,KAAK,CAAC,YAAY,CAAC,CAAA;CAC/B;AAED;;;;;;;;;;GAUG;AACH,MAAM,WAAW,UAAU;IACzB,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,MAAM,CAAC;IACjB,WAAW,EAAE,MAAM,CAAC;IACpB,GAAG,EAAE,GAAG,CAAC;IACT,MAAM,EAAE,GAAG,CAAC;IACZ,SAAS,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;IACxB,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,MAAM,CACjB,OAAO,GAAG,QAAQ,GAAG,OAAO,GAAG,MAAM,GAAG,OAAO,GAAG,MAAM,EACxD,KAAK,CAAC,KAAK,CAAC,CACb,CAAC;IACF,SAAS,EAAE,MAAM,CACf,OAAO,GAAG,QAAQ,GAAG,OAAO,GAAG,MAAM,GAAG,OAAO,EAC/C;QAAE,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC;QAAC,SAAS,EAAE,YAAY,GAAG,cAAc,GAAG,gBAAgB,GAAG,iBAAiB,GAAG,iBAAiB,GAAG,gBAAgB,GAAG,mBAAmB,GAAG,kBAAkB,CAAA;KAAE,CACtM,CAAC;CACH;AAED;;;;;;;;;;;EAWE;AACF,MAAM,WAAW,YAAY;IAC3B,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,GAAG,EAAE,GAAG,CAAC;IACT,MAAM,EAAE,GAAG,CAAC;CACb;AAED;;;;;;;GAOG;AACH,oBAAY,aAAa,GACvB;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,WAAW,CAAA;CAAE,GACtC;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,WAAW,CAAA;CAAE,GACxC;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,WAAW,CAAA;CAAE,GACxC;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,WAAW,CAAA;CAAE,CAAA;AAE5C;;;;;;;;;;;EAWE;AACF,MAAM,WAAW,YAAY;IAC3B,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,UAAU,CAAC;IACjB,IAAI,EAAE,UAAU,GAAG,IAAI,CAAC;IACxB,KAAK,EAAE;QAAE,IAAI,EAAE,UAAU,GAAG,IAAI,CAAC;QAAC,KAAK,EAAE,UAAU,GAAG,IAAI,CAAA;KAAE,CAAC;IAC7D,QAAQ,EAAE,KAAK,CAAC,aAAa,CAAC,CAAC;IAC/B,GAAG,EAAE,GAAG,CAAC;IACT,MAAM,CAAC,EAAE,GAAG,CAAC;CACd;AAED;;;;GAIG;AACH,MAAM,WAAW,MAAM;IACrB,uDAAuD;IACvD,IAAI,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;IACxB,uDAAuD;IACvD,IAAI,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;IACxB,uDAAu
D;IACvD,IAAI,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;IACxB,0DAA0D;IAC1D,OAAO,EAAE,KAAK,CAAC,aAAa,CAAC,CAAC;IAC9B,yDAAyD;IACzD,MAAM,EAAE,KAAK,CAAC,YAAY,CAAC,CAAA;IAC3B,sEAAsE;IACtE,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACpC,yEAAyE;IACzE,MAAM,CAAC,EAAE,eAAe,GAAG,iBAAiB,GAAG,IAAI,GAAG,SAAS,CAAC;IAChE,wFAAwF;IACxF,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;IAC3B,2DAA2D;IAC3D,OAAO,EAAE,KAAK,CAAC,YAAY,CAAC,CAAC;CAC9B"}

@ -1 +1 @@
{"version":3,"file":"draw.d.ts","sourceRoot":"","sources":["../../../src/util/draw.ts"],"names":[],"mappings":"AAAA;;GAEG;AAIH,OAAO,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,aAAa,EAAE,YAAY,EAAS,MAAM,WAAW,CAAC;AAE9H;;;;;;;;;;;;;;;;;;;GAmBG;AACH,MAAM,WAAW,WAAW;IAC1B,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,EAAE,MAAM,CAAC;IACnB,WAAW,EAAE,MAAM,CAAC;IACpB,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,UAAU,EAAE,OAAO,CAAC;IACpB,UAAU,EAAE,OAAO,CAAC;IACpB,YAAY,EAAE,OAAO,CAAC;IACtB,SAAS,EAAE,OAAO,CAAC;IACnB,YAAY,EAAE,OAAO,CAAC;IACtB,QAAQ,EAAE,OAAO,CAAC;IAClB,YAAY,EAAE,OAAO,CAAC;IACtB,QAAQ,EAAE,OAAO,CAAC;IAClB,SAAS,EAAE,OAAO,CAAC;IACnB,cAAc,EAAE,OAAO,CAAC;CACzB;AAED,eAAO,MAAM,OAAO,EAAE,WAmBrB,CAAC;AAmGF,wBAAsB,OAAO,CAAC,QAAQ,EAAE,iBAAiB,GAAG,eAAe,EAAE,MAAM,EAAE,KAAK,CAAC,aAAa,CAAC,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,iBAyB5I;AAED,wBAAsB,IAAI,CAAC,QAAQ,EAAE,iBAAiB,GAAG,eAAe,EAAE,MAAM,EAAE,KAAK,CAAC,UAAU,CAAC,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,iBAuHtI;AAED,wBAAsB,IAAI,CAAC,QAAQ,EAAE,iBAAiB,GAAG,eAAe,EAAE,MAAM,EAAE,KAAK,CAAC,UAAU,CAAC,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,iBAwCtI;AAED,wBAAsB,IAAI,CAAC,QAAQ,EAAE,iBAAiB,GAAG,eAAe,EAAE,MAAM,EAAE,KAAK,CAAC,UAAU,CAAC,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,iBA+DtI;AAED,wBAAsB,MAAM,CAAC,QAAQ,EAAE,iBAAiB,GAAG,eAAe,EAAE,MAAM,EAAE,KAAK,CAAC,YAAY,CAAC,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,iBAuB1I;AAED,wBAAsB,MAAM,CAAC,QAAQ,EAAE,iBAAiB,GAAG,eAAe,EAAE,MAAM,EAAE,KAAK,CAAC,YAAY,CAAC,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,iBAwB1I;AAED,wBAAsB,MAAM,CAAC,KAAK,EAAE,iBAAiB,GAAG,eAAe,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,gBAAgB,EAAE,MAAM,EAAE,iBAAiB,iBAI1J;AAED,wBAAsB,GAAG,CAAC,QAAQ,EAAE,iBAAiB,GAAG,eAAe,EAAE,MAAM,EAAE,MAAM,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,kDAc1H"} 
{"version":3,"file":"draw.d.ts","sourceRoot":"","sources":["../../../src/util/draw.ts"],"names":[],"mappings":"AAAA;;GAEG;AAKH,OAAO,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,aAAa,EAAE,YAAY,EAAS,MAAM,WAAW,CAAC;AAE9H;;;;;;;;;;;;;;;;;;;GAmBG;AACH,MAAM,WAAW,WAAW;IAC1B,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,EAAE,MAAM,CAAC;IACnB,WAAW,EAAE,MAAM,CAAC;IACpB,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,UAAU,EAAE,OAAO,CAAC;IACpB,UAAU,EAAE,OAAO,CAAC;IACpB,YAAY,EAAE,OAAO,CAAC;IACtB,SAAS,EAAE,OAAO,CAAC;IACnB,YAAY,EAAE,OAAO,CAAC;IACtB,QAAQ,EAAE,OAAO,CAAC;IAClB,YAAY,EAAE,OAAO,CAAC;IACtB,QAAQ,EAAE,OAAO,CAAC;IAClB,SAAS,EAAE,OAAO,CAAC;IACnB,cAAc,EAAE,OAAO,CAAC;CACzB;AAED,eAAO,MAAM,OAAO,EAAE,WAmBrB,CAAC;AAmGF,wBAAsB,OAAO,CAAC,QAAQ,EAAE,iBAAiB,GAAG,eAAe,EAAE,MAAM,EAAE,KAAK,CAAC,aAAa,CAAC,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,iBAyB5I;AAED,wBAAsB,IAAI,CAAC,QAAQ,EAAE,iBAAiB,GAAG,eAAe,EAAE,MAAM,EAAE,KAAK,CAAC,UAAU,CAAC,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,iBAuHtI;AAED,wBAAsB,IAAI,CAAC,QAAQ,EAAE,iBAAiB,GAAG,eAAe,EAAE,MAAM,EAAE,KAAK,CAAC,UAAU,CAAC,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,iBAwCtI;AAED,wBAAsB,IAAI,CAAC,QAAQ,EAAE,iBAAiB,GAAG,eAAe,EAAE,MAAM,EAAE,KAAK,CAAC,UAAU,CAAC,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,iBA+DtI;AAED,wBAAsB,MAAM,CAAC,QAAQ,EAAE,iBAAiB,GAAG,eAAe,EAAE,MAAM,EAAE,KAAK,CAAC,YAAY,CAAC,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,iBAuB1I;AAED,wBAAsB,MAAM,CAAC,QAAQ,EAAE,iBAAiB,GAAG,eAAe,EAAE,MAAM,EAAE,KAAK,CAAC,YAAY,CAAC,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,iBAwB1I;AAED,wBAAsB,MAAM,CAAC,KAAK,EAAE,iBAAiB,GAAG,eAAe,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,gBAAgB,EAAE,MAAM,EAAE,iBAAiB,iBAI1J;AAED,wBAAsB,GAAG,CAAC,QAAQ,EAAE,iBAAiB,GAAG,eAAe,EAAE,MAAM,EAAE,MAAM,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,kDAc1H"}

@ -22,6 +22,8 @@ export declare class Env {
}; };
/** Is offscreenCanvas supported? */ /** Is offscreenCanvas supported? */
offscreen: undefined | boolean; offscreen: undefined | boolean;
/** Are performance counter instant values or additive */
perfadd: boolean;
/** WASM detected capabilities */ /** WASM detected capabilities */
wasm: { wasm: {
supported: undefined | boolean; supported: undefined | boolean;
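The new `Env.perfadd` flag decides whether those performance counters are reported as per-call values or accumulated. A minimal sketch, assuming the `Env` instance is exposed on a `Human` instance as `human.env` and that the flag defaults to `false`:

import Human from '@vladmandic/human';

const human = new Human();
// when true, timing values in result.performance are assumed to accumulate across calls
// instead of being replaced by the latest measurement (per the doc comment in the hunk above)
human.env.perfadd = true;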

@ -1 +1 @@
{"version":3,"file":"env.d.ts","sourceRoot":"","sources":["../../../src/util/env.ts"],"names":[],"mappings":"AAGA,iDAAiD;AACjD,qBAAa,GAAG;IACd,yBAAyB;IACzB,OAAO,EAAE,OAAO,CAAC;IACjB,wBAAwB;IACxB,IAAI,EAAE,OAAO,CAAC;IACd,kCAAkC;IAClC,MAAM,EAAE,OAAO,CAAC;IAChB,wBAAwB;IACxB,QAAQ,EAAE,MAAM,CAAM;IACtB,qBAAqB;IACrB,KAAK,EAAE,MAAM,CAAM;IACnB,iCAAiC;IACjC,QAAQ,EAAE,MAAM,EAAE,CAAM;IACxB,yCAAyC;IACzC,OAAO,EAAE,OAAO,CAAC;IACjB,mCAAmC;IACnC,MAAM,EAAE,OAAO,GAAG,SAAS,CAAC;IAC5B,4BAA4B;IAC5B,IAAI,EAAE;QACJ,OAAO,EAAE,SAAS,GAAG,MAAM,CAAC;KAC7B,CAAC;IACF,oCAAoC;IACpC,SAAS,EAAE,SAAS,GAAG,OAAO,CAAC;IAC/B,iCAAiC;IACjC,IAAI,EAAE;QACJ,SAAS,EAAE,SAAS,GAAG,OAAO,CAAC;QAC/B,OAAO,EAAE,SAAS,GAAG,OAAO,CAAC;QAC7B,IAAI,EAAE,SAAS,GAAG,OAAO,CAAC;QAC1B,WAAW,EAAE,SAAS,GAAG,OAAO,CAAC;KAClC,CAKG;IACJ,kCAAkC;IAClC,KAAK,EAAE;QACL,SAAS,EAAE,SAAS,GAAG,OAAO,CAAC;QAC/B,OAAO,EAAE,SAAS,GAAG,OAAO,CAAC;QAC7B,OAAO,EAAE,SAAS,GAAG,MAAM,CAAC;QAC5B,QAAQ,EAAE,SAAS,GAAG,MAAM,CAAC;KAC9B,CAKG;IACJ,mCAAmC;IACnC,MAAM,EAAE;QACN,SAAS,EAAE,SAAS,GAAG,OAAO,CAAC;QAC/B,OAAO,EAAE,SAAS,GAAG,OAAO,CAAC;QAC7B,OAAO,EAAE,SAAS,GAAG,MAAM,CAAC;KAC7B,CAIG;IACJ,eAAe;IACf,GAAG,EAAE;QACH,KAAK,EAAE,SAAS,GAAG,MAAM,CAAC;QAC1B,KAAK,EAAE,MAAM,EAAE,CAAC;KACjB,CAGG;IACJ,oDAAoD;IACpD,OAAO,EAAE,MAAM,EAAE,CAAM;IACvB,6BAA6B;IAC7B,MAAM,EAAE,SAAS,CAAC;IAClB,4BAA4B;IAC5B,KAAK,EAAE,SAAS,CAAC;IACjB,gCAAgC;IAChC,SAAS,EAAE,SAAS,CAAC;;IAkCf,aAAa;IA6Bb,SAAS;CAoBhB;AAED,eAAO,MAAM,GAAG,KAAY,CAAC"} {"version":3,"file":"env.d.ts","sourceRoot":"","sources":["../../../src/util/env.ts"],"names":[],"mappings":"AAGA,iDAAiD;AACjD,qBAAa,GAAG;IACd,yBAAyB;IACzB,OAAO,EAAE,OAAO,CAAC;IACjB,wBAAwB;IACxB,IAAI,EAAE,OAAO,CAAC;IACd,kCAAkC;IAClC,MAAM,EAAE,OAAO,CAAC;IAChB,wBAAwB;IACxB,QAAQ,EAAE,MAAM,CAAM;IACtB,qBAAqB;IACrB,KAAK,EAAE,MAAM,CAAM;IACnB,iCAAiC;IACjC,QAAQ,EAAE,MAAM,EAAE,CAAM;IACxB,yCAAyC;IACzC,OAAO,EAAE,OAAO,CAAC;IACjB,mCAAmC;IACnC,MAAM,EAAE,OAAO,GAAG,SAAS,CAAC;IAC5B,4BAA4B;IAC5B,IAAI,EAAE;QACJ,OAAO,EAAE,SAAS,GAAG,MAAM,CAAC;KAC7B,CAAC;IACF,oCAAoC;IACpC,SAAS,EAAE,SAAS,GAAG,OAAO,CAAC;IAC/B,yDAAyD;IACzD,OAAO,EAAE,OAAO,CAAS;IACzB,iCAAiC;IACjC,IAAI,EAAE;QACJ,SAAS,EAAE,SAAS,GAAG,OAAO,CAAC;QAC/B,OAAO,EAAE,SAAS,GAAG,OAAO,CAAC;QAC7B,IAAI,EAAE,SAAS,GAAG,OAAO,CAAC;QAC1B,WAAW,EAAE,SAAS,GAAG,OAAO,CAAC;KAClC,CAKG;IACJ,kCAAkC;IAClC,KAAK,EAAE;QACL,SAAS,EAAE,SAAS,GAAG,OAAO,CAAC;QAC/B,OAAO,EAAE,SAAS,GAAG,OAAO,CAAC;QAC7B,OAAO,EAAE,SAAS,GAAG,MAAM,CAAC;QAC5B,QAAQ,EAAE,SAAS,GAAG,MAAM,CAAC;KAC9B,CAKG;IACJ,mCAAmC;IACnC,MAAM,EAAE;QACN,SAAS,EAAE,SAAS,GAAG,OAAO,CAAC;QAC/B,OAAO,EAAE,SAAS,GAAG,OAAO,CAAC;QAC7B,OAAO,EAAE,SAAS,GAAG,MAAM,CAAC;KAC7B,CAIG;IACJ,eAAe;IACf,GAAG,EAAE;QACH,KAAK,EAAE,SAAS,GAAG,MAAM,CAAC;QAC1B,KAAK,EAAE,MAAM,EAAE,CAAC;KACjB,CAGG;IACJ,oDAAoD;IACpD,OAAO,EAAE,MAAM,EAAE,CAAM;IACvB,6BAA6B;IAC7B,MAAM,EAAE,SAAS,CAAC;IAClB,4BAA4B;IAC5B,KAAK,EAAE,SAAS,CAAC;IACjB,gCAAgC;IAChC,SAAS,EAAE,SAAS,CAAC;;IAkCf,aAAa;IA6Bb,SAAS;CAoBhB;AAED,eAAO,MAAM,GAAG,KAAY,CAAC"}

wiki

@ -1 +1 @@
Subproject commit d6c2c8c474f1d55f36e4ab4ffc9a1852f2f2b4fb Subproject commit 97e86c65f64df007c25250bcb513d48e5c602242