mirror of https://github.com/vladmandic/human
enable body segmentation and background replacement

parent: ac25188c01
commit: fdf60830e3
@@ -11,6 +11,7 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
 
 ### **HEAD -> main** 2021/06/04 mandic00@live.com
 
+- added experimental body segmentation module
 - add meet and selfie models
 - add live hints to demo
 - switch worker from module to iife importscripts
@@ -65,6 +65,7 @@
 .icon { width: 180px; text-align: -webkit-center; text-align: -moz-center; filter: grayscale(1); }
 .icon:hover { background: #505050; filter: grayscale(0); }
+.hint { opacity: 0; transition-duration: 0.5s; transition-property: opacity; font-style: italic; position: fixed; top: 5rem; padding: 8px; margin: 8px; box-shadow: 0 0 2px 2px #303030; }
 .input-file { align-self: center; width: 5rem; }
 </style>
 </head>
 <body>
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -340,16 +340,16 @@ function getBoxCenter(box6) {
 box6.startPoint[1] + (box6.endPoint[1] - box6.startPoint[1]) / 2
 ];
 }
-function cutBoxFromImageAndResize(box6, image16, cropSize) {
-const h = image16.shape[1];
-const w = image16.shape[2];
+function cutBoxFromImageAndResize(box6, image17, cropSize) {
+const h = image17.shape[1];
+const w = image17.shape[2];
 const boxes = [[
 box6.startPoint[1] / h,
 box6.startPoint[0] / w,
 box6.endPoint[1] / h,
 box6.endPoint[0] / w
 ]];
-return tf2.image.cropAndResize(image16, boxes, [0], cropSize);
+return tf2.image.cropAndResize(image17, boxes, [0], cropSize);
 }
 function enlargeBox(box6, factor = 1.5) {
 const center = getBoxCenter(box6);
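Note: tf.image.cropAndResize expects box corners normalized to the 0..1 range in [y1, x1, y2, x2] order, which is why the hunk above divides y coordinates by the tensor height and x coordinates by the width. A minimal standalone sketch (shapes and values are illustrative, not from this commit):

    // assumes tf = @tensorflow/tfjs; image tensor is [batch, height, width, channels]
    const image = tf.zeros([1, 480, 640, 3]);
    const box = { startPoint: [160, 120], endPoint: [480, 360] }; // [x, y] in pixels
    const boxes = [[
      box.startPoint[1] / 480, // y1 / height
      box.startPoint[0] / 640, // x1 / width
      box.endPoint[1] / 480,   // y2 / height
      box.endPoint[0] / 640    // x2 / width
    ]];
    const crop = tf.image.cropAndResize(image, boxes, [0], [256, 256]); // -> [1, 256, 256, 3]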
@@ -4174,7 +4174,7 @@ async function load3(config3) {
 log("cached model:", model.modelUrl);
 return model;
 }
-async function predict2(image16, config3, idx, count2) {
+async function predict2(image17, config3, idx, count2) {
 if (!model)
 return null;
 if (skipped < config3.face.emotion.skipFrames && config3.skipFrame && lastCount === count2 && last[idx] && last[idx].length > 0) {
@@ -4183,7 +4183,7 @@ async function predict2(image16, config3, idx, count2) {
 }
 skipped = 0;
 return new Promise(async (resolve) => {
-const resize = tf6.image.resizeBilinear(image16, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
+const resize = tf6.image.resizeBilinear(image17, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
 const [red, green, blue] = tf6.split(resize, 3, 3);
 resize.dispose();
 const redNorm = tf6.mul(red, rgb[0]);
@@ -4259,7 +4259,7 @@ function match(embedding, db, threshold = 0) {
 return best;
 }
 function enhance(input) {
-const image16 = tf7.tidy(() => {
+const image17 = tf7.tidy(() => {
 const tensor2 = input.image || input.tensor || input;
 if (!(tensor2 instanceof tf7.Tensor))
 return null;
@@ -4270,9 +4270,9 @@ function enhance(input) {
 const norm = crop.mul(255);
 return norm;
 });
-return image16;
+return image17;
 }
-async function predict3(image16, config3, idx, count2) {
+async function predict3(image17, config3, idx, count2) {
 var _a, _b;
 if (!model2)
 return null;
@@ -4282,7 +4282,7 @@ async function predict3(image16, config3, idx, count2) {
 }
 skipped2 = 0;
 return new Promise(async (resolve) => {
-const enhanced = enhance(image16);
+const enhanced = enhance(image17);
 let resT;
 const obj = {
 age: 0,
@@ -4860,16 +4860,16 @@ function getBoxCenter2(box6) {
 box6.startPoint[1] + (box6.endPoint[1] - box6.startPoint[1]) / 2
 ];
 }
-function cutBoxFromImageAndResize2(box6, image16, cropSize) {
-const h = image16.shape[1];
-const w = image16.shape[2];
+function cutBoxFromImageAndResize2(box6, image17, cropSize) {
+const h = image17.shape[1];
+const w = image17.shape[2];
 const boxes = [[
 box6.startPoint[1] / h,
 box6.startPoint[0] / w,
 box6.endPoint[1] / h,
 box6.endPoint[0] / w
 ]];
-return tf10.image.cropAndResize(image16, boxes, [0], cropSize);
+return tf10.image.cropAndResize(image17, boxes, [0], cropSize);
 }
 function scaleBoxCoordinates2(box6, factor) {
 const startPoint = [box6.startPoint[0] * factor[0], box6.startPoint[1] * factor[1]];
@@ -7904,9 +7904,9 @@ var HandDetector = class {
 async estimateHandBounds(input, config3) {
 const inputHeight = input.shape[1];
 const inputWidth = input.shape[2];
-const image16 = tf11.tidy(() => input.resizeBilinear([this.inputSize, this.inputSize]).div(127.5).sub(1));
-const predictions = await this.getBoxes(image16, config3);
-image16.dispose();
+const image17 = tf11.tidy(() => input.resizeBilinear([this.inputSize, this.inputSize]).div(127.5).sub(1));
+const predictions = await this.getBoxes(image17, config3);
+image17.dispose();
 const hands = [];
 if (!predictions || predictions.length === 0)
 return hands;
@@ -8051,11 +8051,11 @@ var HandPipeline = class {
 Math.trunc(coord[2])
 ]);
 }
-async estimateHands(image16, config3) {
+async estimateHands(image17, config3) {
 let useFreshBox = false;
 let boxes;
 if (this.skipped === 0 || this.skipped > config3.hand.skipFrames || !config3.hand.landmarks || !config3.skipFrame) {
-boxes = await this.handDetector.estimateHandBounds(image16, config3);
+boxes = await this.handDetector.estimateHandBounds(image17, config3);
 this.skipped = 0;
 }
 if (config3.skipFrame)
@@ -8074,8 +8074,8 @@ var HandPipeline = class {
 if (config3.hand.landmarks) {
 const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
 const palmCenter = getBoxCenter2(currentBox);
-const palmCenterNormalized = [palmCenter[0] / image16.shape[2], palmCenter[1] / image16.shape[1]];
-const rotatedImage = config3.hand.rotation && tf12.ENV.flags.IS_BROWSER ? tf12.image.rotateWithOffset(image16, angle, 0, palmCenterNormalized) : image16.clone();
+const palmCenterNormalized = [palmCenter[0] / image17.shape[2], palmCenter[1] / image17.shape[1]];
+const rotatedImage = config3.hand.rotation && tf12.ENV.flags.IS_BROWSER ? tf12.image.rotateWithOffset(image17, angle, 0, palmCenterNormalized) : image17.clone();
 const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
 const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
 const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@@ -8299,14 +8299,14 @@ async function load7(config3) {
 log("cached model:", model4["modelUrl"]);
 return model4;
 }
-async function predict6(image16, config3) {
+async function predict6(image17, config3) {
 var _a;
 if (!model4)
 return [];
 if (!config3.body.enabled)
 return [];
-const imgSize = { width: image16.shape[2] || 0, height: image16.shape[1] || 0 };
-const resize = tf14.image.resizeBilinear(image16, [model4["width"], model4["height"]], false);
+const imgSize = { width: image17.shape[2] || 0, height: image17.shape[1] || 0 };
+const resize = tf14.image.resizeBilinear(image17, [model4["width"], model4["height"]], false);
 const normalize = tf14.div(resize, [255]);
 resize.dispose();
 const resT = await model4.predict(normalize);
@@ -8382,7 +8382,7 @@ function max2d(inputs, minScore) {
 return [0, 0, newScore];
 });
 }
-async function predict7(image16, config3) {
+async function predict7(image17, config3) {
 if (skipped3 < config3.body.skipFrames && config3.skipFrame && Object.keys(keypoints).length > 0) {
 skipped3++;
 return [{ id: 0, score, box: box4, boxRaw, keypoints }];
@@ -8392,7 +8392,7 @@ async function predict7(image16, config3) {
 const tensor2 = tf15.tidy(() => {
 if (!model5.inputs[0].shape)
 return null;
-const resize = tf15.image.resizeBilinear(image16, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
+const resize = tf15.image.resizeBilinear(image17, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
 const enhance2 = tf15.mul(resize, 2);
 const norm = enhance2.sub(1);
 return norm;
@@ -8418,8 +8418,8 @@ async function predict7(image16, config3) {
 y2 / model5.inputs[0].shape[1]
 ],
 position: [
-Math.round(image16.shape[2] * x2 / model5.inputs[0].shape[2]),
-Math.round(image16.shape[1] * y2 / model5.inputs[0].shape[1])
+Math.round(image17.shape[2] * x2 / model5.inputs[0].shape[2]),
+Math.round(image17.shape[1] * y2 / model5.inputs[0].shape[1])
 ]
 });
 }
@@ -8467,7 +8467,7 @@ async function load9(config3) {
 log("cached model:", model6["modelUrl"]);
 return model6;
 }
-async function predict8(image16, config3) {
+async function predict8(image17, config3) {
 if (skipped4 < config3.body.skipFrames && config3.skipFrame && Object.keys(keypoints2).length > 0) {
 skipped4++;
 return [{ id: 0, score: score2, box: box5, boxRaw: boxRaw2, keypoints: keypoints2 }];
@@ -8477,7 +8477,7 @@ async function predict8(image16, config3) {
 const tensor2 = tf16.tidy(() => {
 if (!model6.inputs[0].shape)
 return null;
-const resize = tf16.image.resizeBilinear(image16, [model6.inputs[0].shape[2], model6.inputs[0].shape[1]], false);
+const resize = tf16.image.resizeBilinear(image17, [model6.inputs[0].shape[2], model6.inputs[0].shape[1]], false);
 const cast2 = tf16.cast(resize, "int32");
 return cast2;
 });
@@ -8501,8 +8501,8 @@ async function predict8(image16, config3) {
 kpt3[id][0]
 ],
 position: [
-Math.round((image16.shape[2] || 0) * kpt3[id][1]),
-Math.round((image16.shape[1] || 0) * kpt3[id][0])
+Math.round((image17.shape[2] || 0) * kpt3[id][1]),
+Math.round((image17.shape[1] || 0) * kpt3[id][0])
 ]
 });
 }
@@ -8697,15 +8697,15 @@ async function process2(res, inputSize, outputShape, config3) {
 results = results.filter((a, idx) => nmsIdx.includes(idx)).sort((a, b) => b.score - a.score);
 return results;
 }
-async function predict9(image16, config3) {
+async function predict9(image17, config3) {
 if (skipped5 < config3.object.skipFrames && config3.skipFrame && last3.length > 0) {
 skipped5++;
 return last3;
 }
 skipped5 = 0;
 return new Promise(async (resolve) => {
-const outputSize = [image16.shape[2], image16.shape[1]];
-const resize = tf17.image.resizeBilinear(image16, [model7.inputSize, model7.inputSize], false);
+const outputSize = [image17.shape[2], image17.shape[1]];
+const resize = tf17.image.resizeBilinear(image17, [model7.inputSize, model7.inputSize], false);
 const norm = resize.div(255);
 const transpose = norm.transpose([0, 3, 1, 2]);
 norm.dispose();
@@ -9059,8 +9059,8 @@ function GLImageFilter(params) {
 gl.uniform1f(_currentProgram.uniform.flipY, flipY ? -1 : 1);
 gl.drawArrays(gl.TRIANGLES, 0, 6);
 };
-this.apply = function(image16) {
-_resize(image16.width, image16.height);
+this.apply = function(image17) {
+_resize(image17.width, image17.height);
 _drawCount = 0;
 if (!_sourceTexture)
 _sourceTexture = gl.createTexture();
@@ -9069,7 +9069,7 @@ function GLImageFilter(params) {
 gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
 gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
 gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
-gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image16);
+gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image17);
 if (_filterChain.length === 0) {
 _draw();
 return _canvas;
@@ -10433,9 +10433,9 @@ async function load12(config3) {
 async function predict11(input, config3) {
 var _a, _b, _c, _d;
 if (!config3.segmentation.enabled || !input.tensor || !input.canvas)
-return false;
+return null;
 if (!model9 || !model9.inputs[0].shape)
-return false;
+return null;
 const resizeInput = tf20.image.resizeBilinear(input.tensor, [model9.inputs[0].shape[1], model9.inputs[0].shape[2]], false);
 const norm = resizeInput.div(255);
 const res = model9.predict(norm);
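predict11 follows the usual tfjs pre-processing pattern before the model runs: resize the input tensor to the model's expected height and width, scale pixel values to 0..1, then call predict; the surrounding code then maps the resulting mask back onto the source canvas. A generic sketch of that pattern (the model path and variable names are assumptions, not taken from this commit):

    // assumes tf = @tensorflow/tfjs and an NHWC graph model
    const model = await tf.loadGraphModel("/models/segmentation.json"); // placeholder path
    const shape = model.inputs[0].shape; // [batch, height, width, channels]
    const resized = tf.image.resizeBilinear(inputTensor, [shape[1], shape[2]], false);
    const normalized = resized.div(255); // scale 0..255 -> 0..1
    const mask = model.predict(normalized);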
@@ -10467,6 +10467,13 @@ async function predict11(input, config3) {
 tf20.dispose(resizeOutput);
 tf20.dispose(squeeze4);
 tf20.dispose(res);
+const alphaCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(input.canvas.width, input.canvas.height) : document.createElement("canvas");
+alphaCanvas.width = input.canvas.width;
+alphaCanvas.height = input.canvas.height;
+const ctxAlpha = alphaCanvas.getContext("2d");
+ctxAlpha.filter = "blur(8px)";
+await ctxAlpha.drawImage(overlay, 0, 0);
+const alpha = ctxAlpha.getImageData(0, 0, input.canvas.width, input.canvas.height).data;
 const original = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(input.canvas.width, input.canvas.height) : document.createElement("canvas");
 original.width = input.canvas.width;
 original.height = input.canvas.height;
@@ -10475,10 +10482,43 @@ async function predict11(input, config3) {
 ctx.globalCompositeOperation = "darken";
 ctx.filter = "blur(8px)";
 await ctx.drawImage(overlay, 0, 0);
-ctx.globalCompositeOperation = "source-in";
+ctx.globalCompositeOperation = "source-over";
 ctx.filter = "none";
 input.canvas = original;
-return true;
+return alpha;
 }
+async function process5(input, background, config3) {
+var _a;
+if (!config3.segmentation.enabled)
+config3.segmentation.enabled = true;
+if (!model9)
+await load12(config3);
+const img = process4(input, config3);
+const alpha = await predict11(img, config3);
+tf20.dispose(img.tensor);
+if (background && alpha) {
+const tmp = process4(background, config3);
+const bg = tmp.canvas;
+tf20.dispose(tmp.tensor);
+const fg = img.canvas;
+const fgData = (_a = fg.getContext("2d")) == null ? void 0 : _a.getImageData(0, 0, fg.width, fg.height).data;
+const c = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(fg.width, fg.height) : document.createElement("canvas");
+c.width = fg.width;
+c.height = fg.height;
+const ctx = c.getContext("2d");
+ctx.globalCompositeOperation = "copy";
+ctx.drawImage(bg, 0, 0, c.width, c.height);
+const cData = ctx.getImageData(0, 0, c.width, c.height);
+for (let i = 0; i < c.width * c.height; i++) {
+cData.data[4 * i + 0] = (255 - alpha[4 * i + 0]) / 255 * cData.data[4 * i + 0] + alpha[4 * i + 0] / 255 * fgData[4 * i + 0];
+cData.data[4 * i + 1] = (255 - alpha[4 * i + 1]) / 255 * cData.data[4 * i + 1] + alpha[4 * i + 1] / 255 * fgData[4 * i + 1];
+cData.data[4 * i + 2] = (255 - alpha[4 * i + 2]) / 255 * cData.data[4 * i + 2] + alpha[4 * i + 2] / 255 * fgData[4 * i + 2];
+cData.data[4 * i + 3] = (255 - alpha[4 * i + 3]) / 255 * cData.data[4 * i + 3] + alpha[4 * i + 3] / 255 * fgData[4 * i + 3];
+}
+ctx.putImageData(cData, 0, 0);
+return c;
+}
+return img.canvas;
+}
 
 // src/sample.ts
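The per-pixel loop in process5 above is a plain linear blend: for each RGBA channel, output = (1 - a/255) * background + (a/255) * foreground, where a comes from the blurred segmentation mask. A self-contained sketch of the same operation (function and parameter names are illustrative only):

    // all three arrays are RGBA Uint8ClampedArray of equal length
    function composite(bgData, fgData, alphaData) {
      const out = new Uint8ClampedArray(bgData.length);
      for (let i = 0; i < bgData.length; i++) {
        const a = alphaData[i] / 255; // mask value as 0..1 blend factor
        out[i] = (1 - a) * bgData[i] + a * fgData[i]; // lerp per channel
      }
      return out;
    }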
@@ -11423,6 +11463,9 @@ var Human = class {
 similarity(embedding1, embedding2) {
 return similarity(embedding1, embedding2);
 }
+segmentation(input, background) {
+return process5(input, background, this.config);
+}
 enhance(input) {
 return enhance(input);
 }
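Given the method added above, calling the new API would look roughly like this (a minimal sketch assuming a browser page; the element ids are placeholders). Per process5 above, the promise resolves to a canvas: the segmented person composited over the supplied background, or the processed input canvas when no background is given.

    const human = new Human();
    const video = document.getElementById("webcam");     // placeholder source element
    const background = document.getElementById("beach"); // placeholder background image
    const output = await human.segmentation(video, background);
    document.body.appendChild(output);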
@@ -11525,8 +11568,8 @@ var Human = class {
 await __privateGet(this, _checkBackend).call(this);
 await this.load();
 timeStamp = now();
-const process5 = process4(input, this.config);
-if (!process5 || !process5.tensor) {
+const process6 = process4(input, this.config);
+if (!process6 || !process6.tensor) {
 log("could not convert input to tensor");
 resolve({ error: "could not convert input to tensor" });
 return;
@@ -11534,7 +11577,7 @@ var Human = class {
 this.performance.image = Math.trunc(now() - timeStamp);
 this.analyze("Get Image:");
 timeStamp = now();
-this.config.skipFrame = await __privateGet(this, _skipFrame).call(this, process5.tensor);
+this.config.skipFrame = await __privateGet(this, _skipFrame).call(this, process6.tensor);
 if (!this.performance.frames)
 this.performance.frames = 0;
 if (!this.performance.cached)
@@ -11550,13 +11593,13 @@ var Human = class {
 let objectRes;
 let elapsedTime;
 if (this.config.async) {
-faceRes = this.config.face.enabled ? detectFace(this, process5.tensor) : [];
+faceRes = this.config.face.enabled ? detectFace(this, process6.tensor) : [];
 if (this.performance.face)
 delete this.performance.face;
 } else {
 this.state = "run:face";
 timeStamp = now();
-faceRes = this.config.face.enabled ? await detectFace(this, process5.tensor) : [];
+faceRes = this.config.face.enabled ? await detectFace(this, process6.tensor) : [];
 elapsedTime = Math.trunc(now() - timeStamp);
 if (elapsedTime > 0)
 this.performance.face = elapsedTime;
@@ -11564,26 +11607,26 @@ var Human = class {
 this.analyze("Start Body:");
 if (this.config.async) {
 if (this.config.body.modelPath.includes("posenet"))
-bodyRes = this.config.body.enabled ? predict4(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? predict4(process6.tensor, this.config) : [];
 else if (this.config.body.modelPath.includes("blazepose"))
-bodyRes = this.config.body.enabled ? predict6(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? predict6(process6.tensor, this.config) : [];
 else if (this.config.body.modelPath.includes("efficientpose"))
-bodyRes = this.config.body.enabled ? predict7(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? predict7(process6.tensor, this.config) : [];
 else if (this.config.body.modelPath.includes("movenet"))
-bodyRes = this.config.body.enabled ? predict8(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? predict8(process6.tensor, this.config) : [];
 if (this.performance.body)
 delete this.performance.body;
 } else {
 this.state = "run:body";
 timeStamp = now();
 if (this.config.body.modelPath.includes("posenet"))
-bodyRes = this.config.body.enabled ? await predict4(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? await predict4(process6.tensor, this.config) : [];
 else if (this.config.body.modelPath.includes("blazepose"))
-bodyRes = this.config.body.enabled ? await predict6(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? await predict6(process6.tensor, this.config) : [];
 else if (this.config.body.modelPath.includes("efficientpose"))
-bodyRes = this.config.body.enabled ? await predict7(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? await predict7(process6.tensor, this.config) : [];
 else if (this.config.body.modelPath.includes("movenet"))
-bodyRes = this.config.body.enabled ? await predict8(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? await predict8(process6.tensor, this.config) : [];
 elapsedTime = Math.trunc(now() - timeStamp);
 if (elapsedTime > 0)
 this.performance.body = elapsedTime;
@@ -11591,13 +11634,13 @@ var Human = class {
 this.analyze("End Body:");
 this.analyze("Start Hand:");
 if (this.config.async) {
-handRes = this.config.hand.enabled ? predict5(process5.tensor, this.config) : [];
+handRes = this.config.hand.enabled ? predict5(process6.tensor, this.config) : [];
 if (this.performance.hand)
 delete this.performance.hand;
 } else {
 this.state = "run:hand";
 timeStamp = now();
-handRes = this.config.hand.enabled ? await predict5(process5.tensor, this.config) : [];
+handRes = this.config.hand.enabled ? await predict5(process6.tensor, this.config) : [];
 elapsedTime = Math.trunc(now() - timeStamp);
 if (elapsedTime > 0)
 this.performance.hand = elapsedTime;
@@ -11606,18 +11649,18 @@ var Human = class {
 this.analyze("Start Object:");
 if (this.config.async) {
 if (this.config.object.modelPath.includes("nanodet"))
-objectRes = this.config.object.enabled ? predict9(process5.tensor, this.config) : [];
+objectRes = this.config.object.enabled ? predict9(process6.tensor, this.config) : [];
 else if (this.config.object.modelPath.includes("centernet"))
-objectRes = this.config.object.enabled ? predict10(process5.tensor, this.config) : [];
+objectRes = this.config.object.enabled ? predict10(process6.tensor, this.config) : [];
 if (this.performance.object)
 delete this.performance.object;
 } else {
 this.state = "run:object";
 timeStamp = now();
 if (this.config.object.modelPath.includes("nanodet"))
-objectRes = this.config.object.enabled ? await predict9(process5.tensor, this.config) : [];
+objectRes = this.config.object.enabled ? await predict9(process6.tensor, this.config) : [];
 else if (this.config.object.modelPath.includes("centernet"))
-objectRes = this.config.object.enabled ? await predict10(process5.tensor, this.config) : [];
+objectRes = this.config.object.enabled ? await predict10(process6.tensor, this.config) : [];
 elapsedTime = Math.trunc(now() - timeStamp);
 if (elapsedTime > 0)
 this.performance.object = elapsedTime;
@@ -11634,16 +11677,6 @@ var Human = class {
 else if (this.performance.gesture)
 delete this.performance.gesture;
 }
-if (this.config.segmentation.enabled) {
-this.analyze("Start Segmentation:");
-this.state = "run:segmentation";
-timeStamp = now();
-await predict11(process5, this.config);
-elapsedTime = Math.trunc(now() - timeStamp);
-if (elapsedTime > 0)
-this.performance.segmentation = elapsedTime;
-this.analyze("End Segmentation:");
-}
 this.performance.total = Math.trunc(now() - timeStart);
 this.state = "idle";
 this.result = {
@@ -11653,14 +11686,14 @@ var Human = class {
 gesture: gestureRes,
 object: objectRes,
 performance: this.performance,
-canvas: process5.canvas,
+canvas: process6.canvas,
 timestamp: Date.now(),
 get persons() {
 var _a;
-return join2(faceRes, bodyRes, handRes, gestureRes, (_a = process5 == null ? void 0 : process5.tensor) == null ? void 0 : _a.shape);
+return join2(faceRes, bodyRes, handRes, gestureRes, (_a = process6 == null ? void 0 : process6.tensor) == null ? void 0 : _a.shape);
 }
 };
-tf21.dispose(process5.tensor);
+tf21.dispose(process6.tensor);
 resolve(this.result);
 });
 }
@@ -341,16 +341,16 @@ function getBoxCenter(box6) {
 box6.startPoint[1] + (box6.endPoint[1] - box6.startPoint[1]) / 2
 ];
 }
-function cutBoxFromImageAndResize(box6, image16, cropSize) {
-const h = image16.shape[1];
-const w = image16.shape[2];
+function cutBoxFromImageAndResize(box6, image17, cropSize) {
+const h = image17.shape[1];
+const w = image17.shape[2];
 const boxes = [[
 box6.startPoint[1] / h,
 box6.startPoint[0] / w,
 box6.endPoint[1] / h,
 box6.endPoint[0] / w
 ]];
-return tf2.image.cropAndResize(image16, boxes, [0], cropSize);
+return tf2.image.cropAndResize(image17, boxes, [0], cropSize);
 }
 function enlargeBox(box6, factor = 1.5) {
 const center = getBoxCenter(box6);
@@ -4175,7 +4175,7 @@ async function load3(config3) {
 log("cached model:", model.modelUrl);
 return model;
 }
-async function predict2(image16, config3, idx, count2) {
+async function predict2(image17, config3, idx, count2) {
 if (!model)
 return null;
 if (skipped < config3.face.emotion.skipFrames && config3.skipFrame && lastCount === count2 && last[idx] && last[idx].length > 0) {
@@ -4184,7 +4184,7 @@ async function predict2(image16, config3, idx, count2) {
 }
 skipped = 0;
 return new Promise(async (resolve) => {
-const resize = tf6.image.resizeBilinear(image16, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
+const resize = tf6.image.resizeBilinear(image17, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
 const [red, green, blue] = tf6.split(resize, 3, 3);
 resize.dispose();
 const redNorm = tf6.mul(red, rgb[0]);
@@ -4260,7 +4260,7 @@ function match(embedding, db, threshold = 0) {
 return best;
 }
 function enhance(input) {
-const image16 = tf7.tidy(() => {
+const image17 = tf7.tidy(() => {
 const tensor2 = input.image || input.tensor || input;
 if (!(tensor2 instanceof tf7.Tensor))
 return null;
@@ -4271,9 +4271,9 @@ function enhance(input) {
 const norm = crop.mul(255);
 return norm;
 });
-return image16;
+return image17;
 }
-async function predict3(image16, config3, idx, count2) {
+async function predict3(image17, config3, idx, count2) {
 var _a, _b;
 if (!model2)
 return null;
@@ -4283,7 +4283,7 @@ async function predict3(image16, config3, idx, count2) {
 }
 skipped2 = 0;
 return new Promise(async (resolve) => {
-const enhanced = enhance(image16);
+const enhanced = enhance(image17);
 let resT;
 const obj = {
 age: 0,
@@ -4861,16 +4861,16 @@ function getBoxCenter2(box6) {
 box6.startPoint[1] + (box6.endPoint[1] - box6.startPoint[1]) / 2
 ];
 }
-function cutBoxFromImageAndResize2(box6, image16, cropSize) {
-const h = image16.shape[1];
-const w = image16.shape[2];
+function cutBoxFromImageAndResize2(box6, image17, cropSize) {
+const h = image17.shape[1];
+const w = image17.shape[2];
 const boxes = [[
 box6.startPoint[1] / h,
 box6.startPoint[0] / w,
 box6.endPoint[1] / h,
 box6.endPoint[0] / w
 ]];
-return tf10.image.cropAndResize(image16, boxes, [0], cropSize);
+return tf10.image.cropAndResize(image17, boxes, [0], cropSize);
 }
 function scaleBoxCoordinates2(box6, factor) {
 const startPoint = [box6.startPoint[0] * factor[0], box6.startPoint[1] * factor[1]];
@@ -7905,9 +7905,9 @@ var HandDetector = class {
 async estimateHandBounds(input, config3) {
 const inputHeight = input.shape[1];
 const inputWidth = input.shape[2];
-const image16 = tf11.tidy(() => input.resizeBilinear([this.inputSize, this.inputSize]).div(127.5).sub(1));
-const predictions = await this.getBoxes(image16, config3);
-image16.dispose();
+const image17 = tf11.tidy(() => input.resizeBilinear([this.inputSize, this.inputSize]).div(127.5).sub(1));
+const predictions = await this.getBoxes(image17, config3);
+image17.dispose();
 const hands = [];
 if (!predictions || predictions.length === 0)
 return hands;
@@ -8052,11 +8052,11 @@ var HandPipeline = class {
 Math.trunc(coord[2])
 ]);
 }
-async estimateHands(image16, config3) {
+async estimateHands(image17, config3) {
 let useFreshBox = false;
 let boxes;
 if (this.skipped === 0 || this.skipped > config3.hand.skipFrames || !config3.hand.landmarks || !config3.skipFrame) {
-boxes = await this.handDetector.estimateHandBounds(image16, config3);
+boxes = await this.handDetector.estimateHandBounds(image17, config3);
 this.skipped = 0;
 }
 if (config3.skipFrame)
@@ -8075,8 +8075,8 @@ var HandPipeline = class {
 if (config3.hand.landmarks) {
 const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
 const palmCenter = getBoxCenter2(currentBox);
-const palmCenterNormalized = [palmCenter[0] / image16.shape[2], palmCenter[1] / image16.shape[1]];
-const rotatedImage = config3.hand.rotation && tf12.ENV.flags.IS_BROWSER ? tf12.image.rotateWithOffset(image16, angle, 0, palmCenterNormalized) : image16.clone();
+const palmCenterNormalized = [palmCenter[0] / image17.shape[2], palmCenter[1] / image17.shape[1]];
+const rotatedImage = config3.hand.rotation && tf12.ENV.flags.IS_BROWSER ? tf12.image.rotateWithOffset(image17, angle, 0, palmCenterNormalized) : image17.clone();
 const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
 const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
 const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@@ -8300,14 +8300,14 @@ async function load7(config3) {
 log("cached model:", model4["modelUrl"]);
 return model4;
 }
-async function predict6(image16, config3) {
+async function predict6(image17, config3) {
 var _a;
 if (!model4)
 return [];
 if (!config3.body.enabled)
 return [];
-const imgSize = { width: image16.shape[2] || 0, height: image16.shape[1] || 0 };
-const resize = tf14.image.resizeBilinear(image16, [model4["width"], model4["height"]], false);
+const imgSize = { width: image17.shape[2] || 0, height: image17.shape[1] || 0 };
+const resize = tf14.image.resizeBilinear(image17, [model4["width"], model4["height"]], false);
 const normalize = tf14.div(resize, [255]);
 resize.dispose();
 const resT = await model4.predict(normalize);
@@ -8383,7 +8383,7 @@ function max2d(inputs, minScore) {
 return [0, 0, newScore];
 });
 }
-async function predict7(image16, config3) {
+async function predict7(image17, config3) {
 if (skipped3 < config3.body.skipFrames && config3.skipFrame && Object.keys(keypoints).length > 0) {
 skipped3++;
 return [{ id: 0, score, box: box4, boxRaw, keypoints }];
@@ -8393,7 +8393,7 @@ async function predict7(image16, config3) {
 const tensor2 = tf15.tidy(() => {
 if (!model5.inputs[0].shape)
 return null;
-const resize = tf15.image.resizeBilinear(image16, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
+const resize = tf15.image.resizeBilinear(image17, [model5.inputs[0].shape[2], model5.inputs[0].shape[1]], false);
 const enhance2 = tf15.mul(resize, 2);
 const norm = enhance2.sub(1);
 return norm;
@@ -8419,8 +8419,8 @@ async function predict7(image16, config3) {
 y2 / model5.inputs[0].shape[1]
 ],
 position: [
-Math.round(image16.shape[2] * x2 / model5.inputs[0].shape[2]),
-Math.round(image16.shape[1] * y2 / model5.inputs[0].shape[1])
+Math.round(image17.shape[2] * x2 / model5.inputs[0].shape[2]),
+Math.round(image17.shape[1] * y2 / model5.inputs[0].shape[1])
 ]
 });
 }
@@ -8468,7 +8468,7 @@ async function load9(config3) {
 log("cached model:", model6["modelUrl"]);
 return model6;
 }
-async function predict8(image16, config3) {
+async function predict8(image17, config3) {
 if (skipped4 < config3.body.skipFrames && config3.skipFrame && Object.keys(keypoints2).length > 0) {
 skipped4++;
 return [{ id: 0, score: score2, box: box5, boxRaw: boxRaw2, keypoints: keypoints2 }];
@@ -8478,7 +8478,7 @@ async function predict8(image16, config3) {
 const tensor2 = tf16.tidy(() => {
 if (!model6.inputs[0].shape)
 return null;
-const resize = tf16.image.resizeBilinear(image16, [model6.inputs[0].shape[2], model6.inputs[0].shape[1]], false);
+const resize = tf16.image.resizeBilinear(image17, [model6.inputs[0].shape[2], model6.inputs[0].shape[1]], false);
 const cast2 = tf16.cast(resize, "int32");
 return cast2;
 });
@@ -8502,8 +8502,8 @@ async function predict8(image16, config3) {
 kpt3[id][0]
 ],
 position: [
-Math.round((image16.shape[2] || 0) * kpt3[id][1]),
-Math.round((image16.shape[1] || 0) * kpt3[id][0])
+Math.round((image17.shape[2] || 0) * kpt3[id][1]),
+Math.round((image17.shape[1] || 0) * kpt3[id][0])
 ]
 });
 }
@@ -8698,15 +8698,15 @@ async function process2(res, inputSize, outputShape, config3) {
 results = results.filter((a, idx) => nmsIdx.includes(idx)).sort((a, b) => b.score - a.score);
 return results;
 }
-async function predict9(image16, config3) {
+async function predict9(image17, config3) {
 if (skipped5 < config3.object.skipFrames && config3.skipFrame && last3.length > 0) {
 skipped5++;
 return last3;
 }
 skipped5 = 0;
 return new Promise(async (resolve) => {
-const outputSize = [image16.shape[2], image16.shape[1]];
-const resize = tf17.image.resizeBilinear(image16, [model7.inputSize, model7.inputSize], false);
+const outputSize = [image17.shape[2], image17.shape[1]];
+const resize = tf17.image.resizeBilinear(image17, [model7.inputSize, model7.inputSize], false);
 const norm = resize.div(255);
 const transpose = norm.transpose([0, 3, 1, 2]);
 norm.dispose();
@@ -9060,8 +9060,8 @@ function GLImageFilter(params) {
 gl.uniform1f(_currentProgram.uniform.flipY, flipY ? -1 : 1);
 gl.drawArrays(gl.TRIANGLES, 0, 6);
 };
-this.apply = function(image16) {
-_resize(image16.width, image16.height);
+this.apply = function(image17) {
+_resize(image17.width, image17.height);
 _drawCount = 0;
 if (!_sourceTexture)
 _sourceTexture = gl.createTexture();
@@ -9070,7 +9070,7 @@ function GLImageFilter(params) {
 gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
 gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
 gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
-gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image16);
+gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image17);
 if (_filterChain.length === 0) {
 _draw();
 return _canvas;
@@ -10434,9 +10434,9 @@ async function load12(config3) {
 async function predict11(input, config3) {
 var _a, _b, _c, _d;
 if (!config3.segmentation.enabled || !input.tensor || !input.canvas)
-return false;
+return null;
 if (!model9 || !model9.inputs[0].shape)
-return false;
+return null;
 const resizeInput = tf20.image.resizeBilinear(input.tensor, [model9.inputs[0].shape[1], model9.inputs[0].shape[2]], false);
 const norm = resizeInput.div(255);
 const res = model9.predict(norm);
@@ -10468,6 +10468,13 @@ async function predict11(input, config3) {
 tf20.dispose(resizeOutput);
 tf20.dispose(squeeze4);
 tf20.dispose(res);
+const alphaCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(input.canvas.width, input.canvas.height) : document.createElement("canvas");
+alphaCanvas.width = input.canvas.width;
+alphaCanvas.height = input.canvas.height;
+const ctxAlpha = alphaCanvas.getContext("2d");
+ctxAlpha.filter = "blur(8px)";
+await ctxAlpha.drawImage(overlay, 0, 0);
+const alpha = ctxAlpha.getImageData(0, 0, input.canvas.width, input.canvas.height).data;
 const original = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(input.canvas.width, input.canvas.height) : document.createElement("canvas");
 original.width = input.canvas.width;
 original.height = input.canvas.height;
@@ -10476,10 +10483,43 @@ async function predict11(input, config3) {
 ctx.globalCompositeOperation = "darken";
 ctx.filter = "blur(8px)";
 await ctx.drawImage(overlay, 0, 0);
-ctx.globalCompositeOperation = "source-in";
+ctx.globalCompositeOperation = "source-over";
 ctx.filter = "none";
 input.canvas = original;
-return true;
+return alpha;
 }
+async function process5(input, background, config3) {
+var _a;
+if (!config3.segmentation.enabled)
+config3.segmentation.enabled = true;
+if (!model9)
+await load12(config3);
+const img = process4(input, config3);
+const alpha = await predict11(img, config3);
+tf20.dispose(img.tensor);
+if (background && alpha) {
+const tmp = process4(background, config3);
+const bg = tmp.canvas;
+tf20.dispose(tmp.tensor);
+const fg = img.canvas;
+const fgData = (_a = fg.getContext("2d")) == null ? void 0 : _a.getImageData(0, 0, fg.width, fg.height).data;
+const c = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(fg.width, fg.height) : document.createElement("canvas");
+c.width = fg.width;
+c.height = fg.height;
+const ctx = c.getContext("2d");
+ctx.globalCompositeOperation = "copy";
+ctx.drawImage(bg, 0, 0, c.width, c.height);
+const cData = ctx.getImageData(0, 0, c.width, c.height);
+for (let i = 0; i < c.width * c.height; i++) {
+cData.data[4 * i + 0] = (255 - alpha[4 * i + 0]) / 255 * cData.data[4 * i + 0] + alpha[4 * i + 0] / 255 * fgData[4 * i + 0];
+cData.data[4 * i + 1] = (255 - alpha[4 * i + 1]) / 255 * cData.data[4 * i + 1] + alpha[4 * i + 1] / 255 * fgData[4 * i + 1];
+cData.data[4 * i + 2] = (255 - alpha[4 * i + 2]) / 255 * cData.data[4 * i + 2] + alpha[4 * i + 2] / 255 * fgData[4 * i + 2];
+cData.data[4 * i + 3] = (255 - alpha[4 * i + 3]) / 255 * cData.data[4 * i + 3] + alpha[4 * i + 3] / 255 * fgData[4 * i + 3];
+}
+ctx.putImageData(cData, 0, 0);
+return c;
+}
+return img.canvas;
+}
 
 // src/sample.ts
@@ -11424,6 +11464,9 @@ var Human = class {
 similarity(embedding1, embedding2) {
 return similarity(embedding1, embedding2);
 }
+segmentation(input, background) {
+return process5(input, background, this.config);
+}
 enhance(input) {
 return enhance(input);
 }
@@ -11526,8 +11569,8 @@ var Human = class {
 await __privateGet(this, _checkBackend).call(this);
 await this.load();
 timeStamp = now();
-const process5 = process4(input, this.config);
-if (!process5 || !process5.tensor) {
+const process6 = process4(input, this.config);
+if (!process6 || !process6.tensor) {
 log("could not convert input to tensor");
 resolve({ error: "could not convert input to tensor" });
 return;
@@ -11535,7 +11578,7 @@ var Human = class {
 this.performance.image = Math.trunc(now() - timeStamp);
 this.analyze("Get Image:");
 timeStamp = now();
-this.config.skipFrame = await __privateGet(this, _skipFrame).call(this, process5.tensor);
+this.config.skipFrame = await __privateGet(this, _skipFrame).call(this, process6.tensor);
 if (!this.performance.frames)
 this.performance.frames = 0;
 if (!this.performance.cached)
@@ -11551,13 +11594,13 @@ var Human = class {
 let objectRes;
 let elapsedTime;
 if (this.config.async) {
-faceRes = this.config.face.enabled ? detectFace(this, process5.tensor) : [];
+faceRes = this.config.face.enabled ? detectFace(this, process6.tensor) : [];
 if (this.performance.face)
 delete this.performance.face;
 } else {
 this.state = "run:face";
 timeStamp = now();
-faceRes = this.config.face.enabled ? await detectFace(this, process5.tensor) : [];
+faceRes = this.config.face.enabled ? await detectFace(this, process6.tensor) : [];
 elapsedTime = Math.trunc(now() - timeStamp);
 if (elapsedTime > 0)
 this.performance.face = elapsedTime;
@@ -11565,26 +11608,26 @@ var Human = class {
 this.analyze("Start Body:");
 if (this.config.async) {
 if (this.config.body.modelPath.includes("posenet"))
-bodyRes = this.config.body.enabled ? predict4(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? predict4(process6.tensor, this.config) : [];
 else if (this.config.body.modelPath.includes("blazepose"))
-bodyRes = this.config.body.enabled ? predict6(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? predict6(process6.tensor, this.config) : [];
 else if (this.config.body.modelPath.includes("efficientpose"))
-bodyRes = this.config.body.enabled ? predict7(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? predict7(process6.tensor, this.config) : [];
 else if (this.config.body.modelPath.includes("movenet"))
-bodyRes = this.config.body.enabled ? predict8(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? predict8(process6.tensor, this.config) : [];
 if (this.performance.body)
 delete this.performance.body;
 } else {
 this.state = "run:body";
 timeStamp = now();
 if (this.config.body.modelPath.includes("posenet"))
-bodyRes = this.config.body.enabled ? await predict4(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? await predict4(process6.tensor, this.config) : [];
 else if (this.config.body.modelPath.includes("blazepose"))
-bodyRes = this.config.body.enabled ? await predict6(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? await predict6(process6.tensor, this.config) : [];
 else if (this.config.body.modelPath.includes("efficientpose"))
-bodyRes = this.config.body.enabled ? await predict7(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? await predict7(process6.tensor, this.config) : [];
 else if (this.config.body.modelPath.includes("movenet"))
-bodyRes = this.config.body.enabled ? await predict8(process5.tensor, this.config) : [];
+bodyRes = this.config.body.enabled ? await predict8(process6.tensor, this.config) : [];
 elapsedTime = Math.trunc(now() - timeStamp);
 if (elapsedTime > 0)
 this.performance.body = elapsedTime;
@@ -11592,13 +11635,13 @@ var Human = class {
 this.analyze("End Body:");
 this.analyze("Start Hand:");
 if (this.config.async) {
-handRes = this.config.hand.enabled ? predict5(process5.tensor, this.config) : [];
+handRes = this.config.hand.enabled ? predict5(process6.tensor, this.config) : [];
 if (this.performance.hand)
 delete this.performance.hand;
 } else {
 this.state = "run:hand";
 timeStamp = now();
-handRes = this.config.hand.enabled ? await predict5(process5.tensor, this.config) : [];
+handRes = this.config.hand.enabled ? await predict5(process6.tensor, this.config) : [];
 elapsedTime = Math.trunc(now() - timeStamp);
 if (elapsedTime > 0)
 this.performance.hand = elapsedTime;
@@ -11607,18 +11650,18 @@ var Human = class {
 this.analyze("Start Object:");
 if (this.config.async) {
 if (this.config.object.modelPath.includes("nanodet"))
-objectRes = this.config.object.enabled ? predict9(process5.tensor, this.config) : [];
+objectRes = this.config.object.enabled ? predict9(process6.tensor, this.config) : [];
 else if (this.config.object.modelPath.includes("centernet"))
-objectRes = this.config.object.enabled ? predict10(process5.tensor, this.config) : [];
+objectRes = this.config.object.enabled ? predict10(process6.tensor, this.config) : [];
 if (this.performance.object)
 delete this.performance.object;
 } else {
 this.state = "run:object";
 timeStamp = now();
 if (this.config.object.modelPath.includes("nanodet"))
-objectRes = this.config.object.enabled ? await predict9(process5.tensor, this.config) : [];
+objectRes = this.config.object.enabled ? await predict9(process6.tensor, this.config) : [];
 else if (this.config.object.modelPath.includes("centernet"))
-objectRes = this.config.object.enabled ? await predict10(process5.tensor, this.config) : [];
+objectRes = this.config.object.enabled ? await predict10(process6.tensor, this.config) : [];
 elapsedTime = Math.trunc(now() - timeStamp);
 if (elapsedTime > 0)
 this.performance.object = elapsedTime;
@@ -11635,16 +11678,6 @@ var Human = class {
 else if (this.performance.gesture)
 delete this.performance.gesture;
 }
-if (this.config.segmentation.enabled) {
-this.analyze("Start Segmentation:");
-this.state = "run:segmentation";
-timeStamp = now();
-await predict11(process5, this.config);
-elapsedTime = Math.trunc(now() - timeStamp);
-if (elapsedTime > 0)
-this.performance.segmentation = elapsedTime;
-this.analyze("End Segmentation:");
-}
 this.performance.total = Math.trunc(now() - timeStart);
 this.state = "idle";
 this.result = {
@@ -11654,14 +11687,14 @@ var Human = class {
 gesture: gestureRes,
 object: objectRes,
 performance: this.performance,
-canvas: process5.canvas,
+canvas: process6.canvas,
 timestamp: Date.now(),
 get persons() {
 var _a;
-return join2(faceRes, bodyRes, handRes, gestureRes, (_a = process5 == null ? void 0 : process5.tensor) == null ? void 0 : _a.shape);
+return join2(faceRes, bodyRes, handRes, gestureRes, (_a = process6 == null ? void 0 : process6.tensor) == null ? void 0 : _a.shape);
 }
 };
-tf21.dispose(process5.tensor);
+tf21.dispose(process6.tensor);
 resolve(this.result);
 });
 }
|
||||
ctxAlpha.filter = "blur(8px";
|
||||
await ctxAlpha.drawImage(overlay, 0, 0);
|
||||
const alpha = ctxAlpha.getImageData(0, 0, input.canvas.width, input.canvas.height).data;
|
||||
const original = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(input.canvas.width, input.canvas.height) : document.createElement("canvas");
|
||||
original.width = input.canvas.width;
|
||||
original.height = input.canvas.height;
|
||||
|
@ -10475,10 +10482,43 @@ async function predict11(input, config3) {
|
|||
ctx.globalCompositeOperation = "darken";
|
||||
ctx.filter = "blur(8px)";
|
||||
await ctx.drawImage(overlay, 0, 0);
|
||||
ctx.globalCompositeOperation = "source-in";
|
||||
ctx.globalCompositeOperation = "source-over";
|
||||
ctx.filter = "none";
|
||||
input.canvas = original;
|
||||
return true;
|
||||
return alpha;
|
||||
}
|
||||
async function process5(input, background, config3) {
|
||||
var _a;
|
||||
if (!config3.segmentation.enabled)
|
||||
config3.segmentation.enabled = true;
|
||||
if (!model9)
|
||||
await load12(config3);
|
||||
const img = process4(input, config3);
|
||||
const alpha = await predict11(img, config3);
|
||||
tf20.dispose(img.tensor);
|
||||
if (background && alpha) {
|
||||
const tmp = process4(background, config3);
|
||||
const bg = tmp.canvas;
|
||||
tf20.dispose(tmp.tensor);
|
||||
const fg = img.canvas;
|
||||
const fgData = (_a = fg.getContext("2d")) == null ? void 0 : _a.getImageData(0, 0, fg.width, fg.height).data;
|
||||
const c = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(fg.width, fg.height) : document.createElement("canvas");
|
||||
c.width = fg.width;
|
||||
c.height = fg.height;
|
||||
const ctx = c.getContext("2d");
|
||||
ctx.globalCompositeOperation = "copy";
|
||||
ctx.drawImage(bg, 0, 0, c.width, c.height);
|
||||
const cData = ctx.getImageData(0, 0, c.width, c.height);
|
||||
for (let i = 0; i < c.width * c.height; i++) {
|
||||
cData.data[4 * i + 0] = (255 - alpha[4 * i + 0]) / 255 * cData.data[4 * i + 0] + alpha[4 * i + 0] / 255 * fgData[4 * i + 0];
|
||||
cData.data[4 * i + 1] = (255 - alpha[4 * i + 1]) / 255 * cData.data[4 * i + 1] + alpha[4 * i + 1] / 255 * fgData[4 * i + 1];
|
||||
cData.data[4 * i + 2] = (255 - alpha[4 * i + 2]) / 255 * cData.data[4 * i + 2] + alpha[4 * i + 2] / 255 * fgData[4 * i + 2];
|
||||
cData.data[4 * i + 3] = (255 - alpha[4 * i + 3]) / 255 * cData.data[4 * i + 3] + alpha[4 * i + 3] / 255 * fgData[4 * i + 3];
|
||||
}
|
||||
ctx.putImageData(cData, 0, 0);
|
||||
return c;
|
||||
}
|
||||
return img.canvas;
|
||||
}
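The per-pixel loop in process5 above is a standard alpha blend: each output channel is computed as ((255 - a) / 255) * bg + (a / 255) * fg, with the blurred segmentation mask value a acting as the per-pixel weight. A minimal standalone sketch of the same arithmetic (hypothetical helper, not part of the bundle):

// hypothetical helper illustrating the blend used by process5 above;
// mask is the 0..255 segmentation value for this pixel
function blendChannel(bg: number, fg: number, mask: number): number {
  const weight = mask / 255; // 0 = keep background, 1 = keep foreground
  return (1 - weight) * bg + weight * fg;
}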
// src/sample.ts
@@ -11423,6 +11463,9 @@ var Human = class {
similarity(embedding1, embedding2) {
return similarity(embedding1, embedding2);
}
segmentation(input, background) {
return process5(input, background, this.config);
}
enhance(input) {
return enhance(input);
}

@@ -11525,8 +11568,8 @@ var Human = class {
await __privateGet(this, _checkBackend).call(this);
await this.load();
timeStamp = now();
const process5 = process4(input, this.config);
if (!process5 || !process5.tensor) {
const process6 = process4(input, this.config);
if (!process6 || !process6.tensor) {
log("could not convert input to tensor");
resolve({ error: "could not convert input to tensor" });
return;

@@ -11534,7 +11577,7 @@ var Human = class {
this.performance.image = Math.trunc(now() - timeStamp);
this.analyze("Get Image:");
timeStamp = now();
this.config.skipFrame = await __privateGet(this, _skipFrame).call(this, process5.tensor);
this.config.skipFrame = await __privateGet(this, _skipFrame).call(this, process6.tensor);
if (!this.performance.frames)
this.performance.frames = 0;
if (!this.performance.cached)

@@ -11550,13 +11593,13 @@ var Human = class {
let objectRes;
let elapsedTime;
if (this.config.async) {
faceRes = this.config.face.enabled ? detectFace(this, process5.tensor) : [];
faceRes = this.config.face.enabled ? detectFace(this, process6.tensor) : [];
if (this.performance.face)
delete this.performance.face;
} else {
this.state = "run:face";
timeStamp = now();
faceRes = this.config.face.enabled ? await detectFace(this, process5.tensor) : [];
faceRes = this.config.face.enabled ? await detectFace(this, process6.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.face = elapsedTime;

@@ -11564,26 +11607,26 @@ var Human = class {
this.analyze("Start Body:");
if (this.config.async) {
if (this.config.body.modelPath.includes("posenet"))
bodyRes = this.config.body.enabled ? predict4(process5.tensor, this.config) : [];
bodyRes = this.config.body.enabled ? predict4(process6.tensor, this.config) : [];
else if (this.config.body.modelPath.includes("blazepose"))
bodyRes = this.config.body.enabled ? predict6(process5.tensor, this.config) : [];
bodyRes = this.config.body.enabled ? predict6(process6.tensor, this.config) : [];
else if (this.config.body.modelPath.includes("efficientpose"))
bodyRes = this.config.body.enabled ? predict7(process5.tensor, this.config) : [];
bodyRes = this.config.body.enabled ? predict7(process6.tensor, this.config) : [];
else if (this.config.body.modelPath.includes("movenet"))
bodyRes = this.config.body.enabled ? predict8(process5.tensor, this.config) : [];
bodyRes = this.config.body.enabled ? predict8(process6.tensor, this.config) : [];
if (this.performance.body)
delete this.performance.body;
} else {
this.state = "run:body";
timeStamp = now();
if (this.config.body.modelPath.includes("posenet"))
bodyRes = this.config.body.enabled ? await predict4(process5.tensor, this.config) : [];
bodyRes = this.config.body.enabled ? await predict4(process6.tensor, this.config) : [];
else if (this.config.body.modelPath.includes("blazepose"))
bodyRes = this.config.body.enabled ? await predict6(process5.tensor, this.config) : [];
bodyRes = this.config.body.enabled ? await predict6(process6.tensor, this.config) : [];
else if (this.config.body.modelPath.includes("efficientpose"))
bodyRes = this.config.body.enabled ? await predict7(process5.tensor, this.config) : [];
bodyRes = this.config.body.enabled ? await predict7(process6.tensor, this.config) : [];
else if (this.config.body.modelPath.includes("movenet"))
bodyRes = this.config.body.enabled ? await predict8(process5.tensor, this.config) : [];
bodyRes = this.config.body.enabled ? await predict8(process6.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.body = elapsedTime;

@@ -11591,13 +11634,13 @@ var Human = class {
this.analyze("End Body:");
this.analyze("Start Hand:");
if (this.config.async) {
handRes = this.config.hand.enabled ? predict5(process5.tensor, this.config) : [];
handRes = this.config.hand.enabled ? predict5(process6.tensor, this.config) : [];
if (this.performance.hand)
delete this.performance.hand;
} else {
this.state = "run:hand";
timeStamp = now();
handRes = this.config.hand.enabled ? await predict5(process5.tensor, this.config) : [];
handRes = this.config.hand.enabled ? await predict5(process6.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.hand = elapsedTime;

@@ -11606,18 +11649,18 @@ var Human = class {
this.analyze("Start Object:");
if (this.config.async) {
if (this.config.object.modelPath.includes("nanodet"))
objectRes = this.config.object.enabled ? predict9(process5.tensor, this.config) : [];
objectRes = this.config.object.enabled ? predict9(process6.tensor, this.config) : [];
else if (this.config.object.modelPath.includes("centernet"))
objectRes = this.config.object.enabled ? predict10(process5.tensor, this.config) : [];
objectRes = this.config.object.enabled ? predict10(process6.tensor, this.config) : [];
if (this.performance.object)
delete this.performance.object;
} else {
this.state = "run:object";
timeStamp = now();
if (this.config.object.modelPath.includes("nanodet"))
objectRes = this.config.object.enabled ? await predict9(process5.tensor, this.config) : [];
objectRes = this.config.object.enabled ? await predict9(process6.tensor, this.config) : [];
else if (this.config.object.modelPath.includes("centernet"))
objectRes = this.config.object.enabled ? await predict10(process5.tensor, this.config) : [];
objectRes = this.config.object.enabled ? await predict10(process6.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.object = elapsedTime;

@@ -11634,16 +11677,6 @@ var Human = class {
else if (this.performance.gesture)
delete this.performance.gesture;
}
if (this.config.segmentation.enabled) {
this.analyze("Start Segmentation:");
this.state = "run:segmentation";
timeStamp = now();
await predict11(process5, this.config);
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.performance.segmentation = elapsedTime;
this.analyze("End Segmentation:");
}
this.performance.total = Math.trunc(now() - timeStart);
this.state = "idle";
this.result = {

@@ -11653,14 +11686,14 @@ var Human = class {
gesture: gestureRes,
object: objectRes,
performance: this.performance,
canvas: process5.canvas,
canvas: process6.canvas,
timestamp: Date.now(),
get persons() {
var _a;
return join2(faceRes, bodyRes, handRes, gestureRes, (_a = process5 == null ? void 0 : process5.tensor) == null ? void 0 : _a.shape);
return join2(faceRes, bodyRes, handRes, gestureRes, (_a = process6 == null ? void 0 : process6.tensor) == null ? void 0 : _a.shape);
}
};
tf21.dispose(process5.tensor);
tf21.dispose(process6.tensor);
resolve(this.result);
});
}
@@ -1,21 +1,21 @@
2021-06-04 20:20:35 INFO: @vladmandic/human version 2.0.0
2021-06-04 20:20:35 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-06-04 20:20:35 INFO: Toolchain: tfjs: 3.7.0 esbuild 0.12.6; typescript 4.2.4; typedoc: 0.20.36 eslint: 7.27.0
2021-06-04 20:20:35 INFO: Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-06-04 20:20:35 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
2021-06-04 20:20:35 STATE: Build for: node type: node: {"imports":41,"importBytes":426297,"outputBytes":374093,"outputFiles":"dist/human.node.js"}
2021-06-04 20:20:35 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
2021-06-04 20:20:35 STATE: Build for: nodeGPU type: node: {"imports":41,"importBytes":426305,"outputBytes":374097,"outputFiles":"dist/human.node-gpu.js"}
2021-06-04 20:20:35 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
2021-06-04 20:20:35 STATE: Build for: nodeWASM type: node: {"imports":41,"importBytes":426372,"outputBytes":374169,"outputFiles":"dist/human.node-wasm.js"}
2021-06-04 20:20:35 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2478,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-06-04 20:20:35 STATE: Build for: browserNoBundle type: esm: {"imports":41,"importBytes":426399,"outputBytes":246661,"outputFiles":"dist/human.esm-nobundle.js"}
2021-06-04 20:20:36 STATE: Build for: browserBundle type: tfjs: {"modules":1299,"moduleBytes":4230827,"imports":7,"importBytes":2478,"outputBytes":1140320,"outputFiles":"dist/tfjs.esm.js"}
2021-06-04 20:20:36 STATE: Build for: browserBundle type: iife: {"imports":41,"importBytes":1565325,"outputBytes":1382891,"outputFiles":"dist/human.js"}
2021-06-04 20:20:37 STATE: Build for: browserBundle type: esm: {"imports":41,"importBytes":1565325,"outputBytes":1382883,"outputFiles":"dist/human.esm.js"}
2021-06-04 20:20:37 INFO: Running Linter: ["server/","demo/","src/","test/"]
2021-06-04 20:21:06 INFO: Linter complete: files: 69 errors: 0 warnings: 0
2021-06-04 20:21:06 INFO: Generate types: ["src/human.ts"]
2021-06-04 20:21:10 INFO: Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-06-04 20:21:10 INFO: Generate TypeDocs: ["src/human.ts"]
2021-06-04 20:21:29 INFO: Documentation generated at /home/vlado/dev/human/typedoc 1
2021-06-05 11:51:39 INFO: @vladmandic/human version 2.0.0
2021-06-05 11:51:39 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-06-05 11:51:39 INFO: Toolchain: tfjs: 3.7.0 esbuild 0.12.6; typescript 4.2.4; typedoc: 0.20.36 eslint: 7.27.0
2021-06-05 11:51:39 INFO: Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-06-05 11:51:39 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
2021-06-05 11:51:39 STATE: Build for: node type: node: {"imports":41,"importBytes":429821,"outputBytes":375853,"outputFiles":"dist/human.node.js"}
2021-06-05 11:51:39 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
2021-06-05 11:51:39 STATE: Build for: nodeGPU type: node: {"imports":41,"importBytes":429829,"outputBytes":375857,"outputFiles":"dist/human.node-gpu.js"}
2021-06-05 11:51:39 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
2021-06-05 11:51:39 STATE: Build for: nodeWASM type: node: {"imports":41,"importBytes":429896,"outputBytes":375929,"outputFiles":"dist/human.node-wasm.js"}
2021-06-05 11:51:39 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2478,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-06-05 11:51:39 STATE: Build for: browserNoBundle type: esm: {"imports":41,"importBytes":429923,"outputBytes":247724,"outputFiles":"dist/human.esm-nobundle.js"}
2021-06-05 11:51:40 STATE: Build for: browserBundle type: tfjs: {"modules":1299,"moduleBytes":4230827,"imports":7,"importBytes":2478,"outputBytes":1140320,"outputFiles":"dist/tfjs.esm.js"}
2021-06-05 11:51:40 STATE: Build for: browserBundle type: iife: {"imports":41,"importBytes":1568849,"outputBytes":1383941,"outputFiles":"dist/human.js"}
2021-06-05 11:51:41 STATE: Build for: browserBundle type: esm: {"imports":41,"importBytes":1568849,"outputBytes":1383933,"outputFiles":"dist/human.esm.js"}
2021-06-05 11:51:41 INFO: Running Linter: ["server/","demo/","src/","test/"]
2021-06-05 11:52:09 INFO: Linter complete: files: 69 errors: 0 warnings: 0
2021-06-05 11:52:09 INFO: Generate types: ["src/human.ts"]
2021-06-05 11:52:13 INFO: Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-06-05 11:52:13 INFO: Generate TypeDocs: ["src/human.ts"]
2021-06-05 11:52:32 INFO: Documentation generated at /home/vlado/dev/human/typedoc 1
@@ -5,12 +5,16 @@

import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import { Config } from '../config';
import { GraphModel, Tensor } from '../tfjs/types';

let model: GraphModel;

let model;
let last = { age: 0 };
let skipped = Number.MAX_SAFE_INTEGER;

export async function load(config) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export async function load(config: Config | any) {
if (!model) {
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.age.modelPath));
if (!model || !model.modelUrl) log('load model failed:', config.face.age.modelPath);

@@ -19,7 +23,8 @@ export async function load(config) {
return model;
}

export async function predict(image, config) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export async function predict(image: Tensor, config: Config | any) {
if (!model) return null;
if ((skipped < config.face.age.skipFrames) && config.skipFrame && last.age && (last.age > 0)) {
skipped++;
@@ -353,6 +353,7 @@ const config: Config = {
segmentation: {
enabled: false, // if segmentation is enabled, output result.canvas will be augmented
// with masked image containing only person output
// segmentation is not triggered as part of detection and requires separate call to human.segmentation
modelPath: 'selfie.json', // experimental: body segmentation model, can be absolute path or relative to modelBasePath
// can be 'selfie' or 'meet'
},
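Because segmentation ships disabled and only runs through the dedicated human.segmentation call, enabling it is purely a configuration change; a minimal sketch, assuming the usual partial-config constructor (only the keys shown are overridden, everything else keeps its default):

import Human from '@vladmandic/human';

const human = new Human({
  modelBasePath: '../models', // assumption: models served from a local path
  segmentation: { enabled: true, modelPath: 'selfie.json' }, // or 'meet.json'
});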
@@ -5,8 +5,10 @@

import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import { Config } from '../config';
import { GraphModel, Tensor } from '../tfjs/types';

let model;
let model: GraphModel;
let last = { gender: '' };
let skipped = Number.MAX_SAFE_INTEGER;
let alternative = false;

@@ -14,7 +16,8 @@ let alternative = false;
// tuning values
const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale

export async function load(config) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export async function load(config: Config | any) {
if (!model) {
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.gender.modelPath));
alternative = model.inputs[0].shape[3] === 1;

@@ -24,7 +27,8 @@ export async function load(config) {
return model;
}

export async function predict(image, config) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export async function predict(image: Tensor, config: Config | any) {
if (!model) return null;
if ((skipped < config.face.gender.skipFrames) && config.skipFrame && last.gender !== '') {
skipped++;
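The age and gender predictors share this frame-skip cache: if fewer than skipFrames frames have passed since the last run and a previous result exists, inference is skipped and the cached value is returned. A condensed sketch of the pattern, using hypothetical names:

// generic frame-skip cache as used by the age/gender predictors (sketch)
let cached: unknown = null;
let framesSinceRun = Number.MAX_SAFE_INTEGER; // forces a run on first call
async function predictCached(run: () => Promise<unknown>, skipFrames: number, skipAllowed: boolean) {
  if (skipAllowed && framesSinceRun < skipFrames && cached) {
    framesSinceRun++;
    return cached; // reuse previous result
  }
  framesSinceRun = 0;
  cached = await run(); // actual inference
  return cached;
}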
src/human.ts
@@ -31,16 +31,16 @@ import { Tensor } from './tfjs/types';

// export types
export type { Config } from './config';
export type { Result, Face, Hand, Body, Item, Gesture } from './result';
export type { Result, Face, Hand, Body, Item, Gesture, Person } from './result';
export type { DrawOptions } from './draw/draw';

/** Defines all possible input types for **Human** detection
* @typedef Input
* @typedef Input Type
*/
export type Input = Tensor | typeof Image | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;

/** Error message
* @typedef Error
* @typedef Error Type
*/
export type Error = { error: string };

@@ -205,6 +205,7 @@ export class Human {

/** Similarity method calculates similarity between two provided face descriptors (face embeddings)
* - Calculation is based on normalized Minkowski distance between the two descriptors
*
* @param embedding1: face descriptor as array of numbers
* @param embedding2: face descriptor as array of numbers
* @returns similarity: number

@@ -214,6 +215,19 @@ export class Human {
return faceres.similarity(embedding1, embedding2);
}
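A short usage sketch for the similarity call documented above; the descriptor field name (result.face[n].embedding) is an assumption here, consistent with the @param notes:

// compare the current face descriptor against a previously stored one
const descriptor = human.result.face[0].embedding as Array<number>; // assumption: descriptor exposed on the face result
const score = human.similarity(descriptor, storedDescriptor); // 0..1, higher means more similar
if (score > 0.5) console.log('probable match:', score);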

/**
* Segmentation method takes any input and returns a processed canvas with body segmentation applied
* The optional background parameter is used to fill the background with the provided input
* Segmentation is not triggered as part of the detect process
*
* @param input: {@link Input}
* @param background?: {@link Input}
* @returns Canvas
*/
segmentation(input: Input, background?: Input) {
return segmentation.process(input, background, this.config);
}
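A hedged usage sketch of the segmentation call: both arguments accept any {@link Input}, and the returned canvas can be drawn straight to an output surface:

// replace a webcam background with a static image
const processed = await human.segmentation(videoElement, backgroundImage);
outputCanvas.getContext('2d')?.drawImage(processed, 0, 0);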

/** Enhance method performs additional enhancements on a previously detected face image to prepare it for further processing
* @param input: Tensor as provided in human.result.face[n].tensor
* @returns Tensor

@@ -372,7 +386,8 @@ export class Human {
/**
* Runs interpolation using last known result and returns smoothed result
* Interpolation is based on time since last known result so it can be called independently
* @param result?: use specific result set to run interpolation on
*
* @param result?: {@link Result} optional use specific result set to run interpolation on
* @returns result: {@link Result}
*/
next = (result?: Result) => interpolate.calc(result || this.result) as Result;
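Since interpolation is keyed to elapsed time, next() can drive a render loop at display rate while detect() runs less frequently; a minimal sketch (the draw helper name is an assumption):

// render smoothed results every animation frame while detection runs in the background
async function render() {
  const smoothed = human.next(); // interpolate from last known result
  await human.draw.all(outputCanvas, smoothed); // assumption: draw.all(canvas, result) helper
  requestAnimationFrame(render);
}
render();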

@@ -410,9 +425,10 @@ export class Human {
* - Pre-process input: {@link Input}
* - Run inference for all configured models
* - Process and return result: {@link Result}
*
* @param input: Input
* @param userConfig?: Config
* @returns result: Result
* @param userConfig?: {@link Config}
* @returns result: {@link Result}
*/
async detect(input: Input, userConfig?: Config | Record<string, unknown>): Promise<Result | Error> {
// detection happens inside a promise
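A minimal end-to-end sketch of the flow documented above, with all options left at their defaults:

const human = new Human();
const result = await human.detect(videoElement); // pre-process, run enabled models, assemble Result
if (!('error' in result)) console.log(result.face.length, result.body.length, result.performance.total);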

@@ -558,6 +574,7 @@ export class Human {
}

// run segmentation
/* not triggered as part of detect
if (this.config.segmentation.enabled) {
this.analyze('Start Segmentation:');
this.state = 'run:segmentation';

@@ -567,6 +584,7 @@ export class Human {
if (elapsedTime > 0) this.performance.segmentation = elapsedTime;
this.analyze('End Segmentation:');
}
*/

this.performance.total = Math.trunc(now() - timeStart);
this.state = 'idle';
@@ -5,6 +5,9 @@
import * as tf from '../../dist/tfjs.esm.js';
import * as fxImage from './imagefx';
import { Tensor } from '../tfjs/types';
import { Config } from '../config';

type Input = Tensor | typeof Image | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;

const maxSize = 2048;
// internal temp canvases

@@ -16,7 +19,7 @@ let fx;
// process input image and return tensor
// input can be tensor, imagedata, htmlimageelement, htmlvideoelement
// input is resized and run through imagefx filter
export function process(input, config): { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement } {
export function process(input: Input, config: Config): { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement } {
let tensor;
if (!input) throw new Error('Human: Input is missing');
// sanity checks since different browsers do not implement all dom elements
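process() is internal (detect() calls it for every input), but its return shape is the contract the rest of the pipeline relies on: the tensor feeds the models while the canvas carries the resized, filtered image. A hedged sketch of direct use:

// direct use of the internal image pipeline (normally reached via human.detect)
const { tensor, canvas } = process(videoElement, config);
if (tensor) {
  // ... run inference against tensor ...
  tf.dispose(tensor); // caller owns the returned tensor
}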
@@ -124,6 +124,7 @@ export interface Item {
}

/** Gesture results
* @typedef Gesture Type
*
* Array of individual results with one object per detected gesture
* Each result has:

@@ -137,6 +138,7 @@ export type Gesture =
| { 'hand': number, gesture: string }

/** Person getter
* @interface Person Interface
*
* Each result has:
* - id: person id
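The persons getter assembles faces, bodies, hands, and gestures into per-person records; a short usage sketch (field names beyond id are assumptions):

// persons is computed on access from the per-model results
for (const person of human.result.persons) {
  console.log(person.id, person.box); // assumption: each Person carries an id and an overall bounding box
}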
@@ -4,15 +4,16 @@

import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import * as image from '../image/image';
import { GraphModel, Tensor } from '../tfjs/types';
import { Config } from '../config';
// import * as blur from './blur';

type Input = Tensor | typeof Image | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;

let model: GraphModel;
// let blurKernel;

export type Segmentation = boolean;

export async function load(config: Config): Promise<GraphModel> {
if (!model) {
// @ts-ignore type mismatch on GraphModel

@@ -24,9 +25,9 @@ export async function load(config: Config): Promise<GraphModel> {
return model;
}

export async function predict(input: { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement }, config: Config): Promise<Segmentation> {
if (!config.segmentation.enabled || !input.tensor || !input.canvas) return false;
if (!model || !model.inputs[0].shape) return false;
export async function predict(input: { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement }, config: Config): Promise<Uint8ClampedArray | null> {
if (!config.segmentation.enabled || !input.tensor || !input.canvas) return null;
if (!model || !model.inputs[0].shape) return null;
const resizeInput = tf.image.resizeBilinear(input.tensor, [model.inputs[0].shape[1], model.inputs[0].shape[2]], false);
const norm = resizeInput.div(255);
const res = model.predict(norm) as Tensor;

@@ -62,28 +63,69 @@ export async function predict(input: { tensor: Tensor | null, canvas: OffscreenC
resizeOutput = tf.image.resizeBilinear(squeeze, [input.tensor?.shape[1], input.tensor?.shape[2]]);
}

// const blurred = blur.blur(resizeOutput, blurKernel);
if (tf.browser) await tf.browser.toPixels(resizeOutput, overlay);
// tf.dispose(blurred);
tf.dispose(resizeOutput);
tf.dispose(squeeze);
tf.dispose(res);

// get alpha channel data
const alphaCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(input.canvas.width, input.canvas.height) : document.createElement('canvas'); // need one more copy since input may already have gl context so 2d context fails
alphaCanvas.width = input.canvas.width;
alphaCanvas.height = input.canvas.height;
const ctxAlpha = alphaCanvas.getContext('2d') as CanvasRenderingContext2D;
ctxAlpha.filter = 'blur(8px)';
await ctxAlpha.drawImage(overlay, 0, 0);
const alpha = ctxAlpha.getImageData(0, 0, input.canvas.width, input.canvas.height).data;

// get original canvas merged with overlay
const original = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(input.canvas.width, input.canvas.height) : document.createElement('canvas'); // need one more copy since input may already have gl context so 2d context fails
original.width = input.canvas.width;
original.height = input.canvas.height;
const ctx = original.getContext('2d') as CanvasRenderingContext2D;

await ctx.drawImage(input.canvas, 0, 0);
// https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/globalCompositeOperation
// best options are: darken, color-burn, multiply
// https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/globalCompositeOperation // best options are: darken, color-burn, multiply
ctx.globalCompositeOperation = 'darken';
ctx.filter = 'blur(8px)'; // use css filter for blurring, can be done with gaussian blur manually instead
await ctx.drawImage(overlay, 0, 0);
ctx.globalCompositeOperation = 'source-in'; // reset
ctx.globalCompositeOperation = 'source-over'; // reset
ctx.filter = 'none'; // reset

input.canvas = original;

return true;
return alpha;
}

export async function process(input: Input, background: Input | undefined, config: Config): Promise<HTMLCanvasElement | OffscreenCanvas> {
if (!config.segmentation.enabled) config.segmentation.enabled = true; // override config
if (!model) await load(config);
const img = image.process(input, config);
const alpha = await predict(img, config);
tf.dispose(img.tensor);

if (background && alpha) {
const tmp = image.process(background, config);
const bg = tmp.canvas;
tf.dispose(tmp.tensor);
const fg = img.canvas;
const fgData = fg.getContext('2d')?.getImageData(0, 0, fg.width, fg.height).data as Uint8ClampedArray;

const c = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(fg.width, fg.height) : document.createElement('canvas');
c.width = fg.width;
c.height = fg.height;
const ctx = c.getContext('2d') as CanvasRenderingContext2D;

ctx.globalCompositeOperation = 'copy'; // reset
ctx.drawImage(bg, 0, 0, c.width, c.height);
const cData = ctx.getImageData(0, 0, c.width, c.height) as ImageData;
for (let i = 0; i < c.width * c.height; i++) { // this should be done with globalCompositeOperation instead of looping through image data
cData.data[4 * i + 0] = ((255 - alpha[4 * i + 0]) / 255.0 * cData.data[4 * i + 0]) + (alpha[4 * i + 0] / 255.0 * fgData[4 * i + 0]);
cData.data[4 * i + 1] = ((255 - alpha[4 * i + 1]) / 255.0 * cData.data[4 * i + 1]) + (alpha[4 * i + 1] / 255.0 * fgData[4 * i + 1]);
cData.data[4 * i + 2] = ((255 - alpha[4 * i + 2]) / 255.0 * cData.data[4 * i + 2]) + (alpha[4 * i + 2] / 255.0 * fgData[4 * i + 2]);
cData.data[4 * i + 3] = ((255 - alpha[4 * i + 3]) / 255.0 * cData.data[4 * i + 3]) + (alpha[4 * i + 3] / 255.0 * fgData[4 * i + 3]);
}
ctx.putImageData(cData, 0, 0);

return c;
}
return img.canvas;
}
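As the inline comment in process() notes, the per-pixel RGBA loop could be replaced by canvas compositing. A hedged sketch of that alternative, assuming the mask layout produced by predict() above (RGBA bytes with the mask value repeated per channel): transfer the mask into the foreground's alpha channel once, then let 'destination-over' composite the background behind it.

function compositeWithMask(fg: HTMLCanvasElement, bg: HTMLCanvasElement, mask: Uint8ClampedArray): HTMLCanvasElement {
  const out = document.createElement('canvas');
  out.width = fg.width;
  out.height = fg.height;
  const ctx = out.getContext('2d') as CanvasRenderingContext2D;
  ctx.drawImage(fg, 0, 0);
  const pixels = ctx.getImageData(0, 0, out.width, out.height);
  for (let i = 0; i < out.width * out.height; i++) pixels.data[4 * i + 3] = mask[4 * i]; // mask value becomes foreground alpha
  ctx.putImageData(pixels, 0, 0);
  ctx.globalCompositeOperation = 'destination-over'; // background fills wherever foreground is transparent
  ctx.drawImage(bg, 0, 0, out.width, out.height);
  return out;
}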
test/test.log
@@ -1,120 +1,120 @@
2021-06-04 09:20:07 INFO: @vladmandic/human version 2.0.0
2021-06-04 09:20:07 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-06-04 09:20:07 INFO: tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
2021-06-04 09:20:07 INFO: test-node.js start
2021-06-04 09:20:08 STATE: test-node.js passed: create human
2021-06-04 09:20:08 INFO: test-node.js human version: 2.0.0
2021-06-04 09:20:08 INFO: test-node.js platform: linux x64 agent: NodeJS v16.0.0
2021-06-04 09:20:08 INFO: test-node.js tfjs version: 3.7.0
2021-06-04 09:20:08 STATE: test-node.js passed: set backend: tensorflow
2021-06-04 09:20:08 STATE: test-node.js passed: load models
2021-06-04 09:20:08 STATE: test-node.js result: defined models: 13 loaded models: 6
2021-06-04 09:20:08 STATE: test-node.js passed: warmup: none default
2021-06-04 09:20:10 STATE: test-node.js passed: warmup: face default
2021-06-04 09:20:10 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
2021-06-04 09:20:10 DATA: test-node.js result: performance: load: 449 total: 1590
2021-06-04 09:20:12 STATE: test-node.js passed: warmup: body default
2021-06-04 09:20:12 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-06-04 09:20:12 DATA: test-node.js result: performance: load: 449 total: 1594
2021-06-04 09:20:12 INFO: test-node.js test body variants
2021-06-04 09:20:13 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-06-04 09:20:14 STATE: test-node.js passed: detect: samples/ai-body.jpg posenet
2021-06-04 09:20:14 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-06-04 09:20:14 DATA: test-node.js result: performance: load: 449 total: 1012
2021-06-04 09:20:15 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-06-04 09:20:15 STATE: test-node.js passed: detect: samples/ai-body.jpg movenet
2021-06-04 09:20:15 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-06-04 09:20:15 DATA: test-node.js result: performance: load: 449 total: 194
2021-06-04 09:20:16 STATE: test-node.js passed: detect: random default
2021-06-04 09:20:16 DATA: test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
2021-06-04 09:20:16 DATA: test-node.js result: performance: load: 449 total: 865
2021-06-04 09:20:16 INFO: test-node.js test: first instance
2021-06-04 09:20:16 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-06-04 09:20:18 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-06-04 09:20:18 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
2021-06-04 09:20:18 DATA: test-node.js result: performance: load: 449 total: 1551
2021-06-04 09:20:18 INFO: test-node.js test: second instance
2021-06-04 09:20:18 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-06-04 09:20:20 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-06-04 09:20:20 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
2021-06-04 09:20:20 DATA: test-node.js result: performance: load: 4 total: 1338
2021-06-04 09:20:20 INFO: test-node.js test: concurrent
2021-06-04 09:20:20 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-06-04 09:20:20 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-06-04 09:20:21 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-06-04 09:20:22 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-06-04 09:20:27 STATE: test-node.js passed: detect: samples/ai-face.jpg default
2021-06-04 09:20:27 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-06-04 09:20:27 DATA: test-node.js result: performance: load: 449 total: 5555
2021-06-04 09:20:27 STATE: test-node.js passed: detect: samples/ai-face.jpg default
2021-06-04 09:20:27 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-06-04 09:20:27 DATA: test-node.js result: performance: load: 4 total: 5555
2021-06-04 09:20:27 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-06-04 09:20:27 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-06-04 09:20:27 DATA: test-node.js result: performance: load: 449 total: 5555
2021-06-04 09:20:27 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-06-04 09:20:27 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-06-04 09:20:27 DATA: test-node.js result: performance: load: 4 total: 5555
2021-06-04 09:20:27 INFO: test-node.js test complete: 19371 ms
2021-06-04 09:20:27 INFO: test-node-gpu.js start
2021-06-04 09:20:28 WARN: test-node-gpu.js stderr: 2021-06-04 09:20:28.514106: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2021-06-04 09:20:28 WARN: test-node-gpu.js stderr: 2021-06-04 09:20:28.621943: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2021-06-04 09:20:28 WARN: test-node-gpu.js stderr: 2021-06-04 09:20:28.621974: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
2021-06-04 09:20:28 STATE: test-node-gpu.js passed: create human
2021-06-04 09:20:28 INFO: test-node-gpu.js human version: 2.0.0
2021-06-04 09:20:28 INFO: test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
2021-06-04 09:20:28 INFO: test-node-gpu.js tfjs version: 3.7.0
2021-06-04 09:20:29 STATE: test-node-gpu.js passed: set backend: tensorflow
2021-06-04 09:20:29 STATE: test-node-gpu.js passed: load models
2021-06-04 09:20:29 STATE: test-node-gpu.js result: defined models: 13 loaded models: 6
2021-06-04 09:20:29 STATE: test-node-gpu.js passed: warmup: none default
2021-06-04 09:20:30 STATE: test-node-gpu.js passed: warmup: face default
2021-06-04 09:20:30 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
2021-06-04 09:20:30 DATA: test-node-gpu.js result: performance: load: 338 total: 1639
2021-06-04 09:20:32 STATE: test-node-gpu.js passed: warmup: body default
2021-06-04 09:20:32 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-06-04 09:20:32 DATA: test-node-gpu.js result: performance: load: 338 total: 1534
2021-06-04 09:20:32 INFO: test-node-gpu.js test body variants
2021-06-04 09:20:33 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-06-04 09:20:34 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg posenet
2021-06-04 09:20:34 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-06-04 09:20:34 DATA: test-node-gpu.js result: performance: load: 338 total: 1167
2021-06-04 09:20:35 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-06-04 09:20:35 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg movenet
2021-06-04 09:20:35 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-06-04 09:20:35 DATA: test-node-gpu.js result: performance: load: 338 total: 200
2021-06-04 09:20:36 STATE: test-node-gpu.js passed: detect: random default
2021-06-04 09:20:36 DATA: test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
2021-06-04 09:20:36 DATA: test-node-gpu.js result: performance: load: 338 total: 851
2021-06-04 09:20:36 INFO: test-node-gpu.js test: first instance
2021-06-04 09:20:37 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-06-04 09:20:38 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-06-04 09:20:38 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
2021-06-04 09:20:38 DATA: test-node-gpu.js result: performance: load: 338 total: 1355
2021-06-04 09:20:38 INFO: test-node-gpu.js test: second instance
2021-06-04 09:20:38 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-06-04 09:20:40 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-06-04 09:20:40 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
2021-06-04 09:20:40 DATA: test-node-gpu.js result: performance: load: 3 total: 1275
2021-06-04 09:20:40 INFO: test-node-gpu.js test: concurrent
2021-06-04 09:20:40 STATE: test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-06-04 09:20:40 STATE: test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-06-04 09:20:41 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-06-04 09:20:42 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-06-04 09:20:47 STATE: test-node-gpu.js passed: detect: samples/ai-face.jpg default
2021-06-04 09:20:47 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-06-04 09:20:47 DATA: test-node-gpu.js result: performance: load: 338 total: 5529
2021-06-04 09:20:47 STATE: test-node-gpu.js passed: detect: samples/ai-face.jpg default
2021-06-04 09:20:47 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-06-04 09:20:47 DATA: test-node-gpu.js result: performance: load: 3 total: 5529
2021-06-04 09:20:47 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
2021-06-04 09:20:47 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-06-04 09:20:47 DATA: test-node-gpu.js result: performance: load: 338 total: 5529
2021-06-04 09:20:47 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
2021-06-04 09:20:47 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-06-04 09:20:47 DATA: test-node-gpu.js result: performance: load: 3 total: 5529
2021-06-04 09:20:47 INFO: test-node-gpu.js test complete: 19093 ms
2021-06-04 09:20:47 INFO: test-node-wasm.js start
2021-06-04 09:20:48 ERROR: test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
2021-06-04 09:20:48 ERROR: test-node-wasm.js aborting test
2021-06-04 09:20:48 INFO: status: {"passed":46,"failed":1}
2021-06-05 11:53:54 INFO: @vladmandic/human version 2.0.0
2021-06-05 11:53:54 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-06-05 11:53:54 INFO: tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
2021-06-05 11:53:54 INFO: test-node.js start
2021-06-05 11:53:56 STATE: test-node.js passed: create human
2021-06-05 11:53:56 INFO: test-node.js human version: 2.0.0
2021-06-05 11:53:56 INFO: test-node.js platform: linux x64 agent: NodeJS v16.0.0
2021-06-05 11:53:56 INFO: test-node.js tfjs version: 3.7.0
2021-06-05 11:53:56 STATE: test-node.js passed: set backend: tensorflow
2021-06-05 11:53:56 STATE: test-node.js passed: load models
2021-06-05 11:53:56 STATE: test-node.js result: defined models: 14 loaded models: 6
2021-06-05 11:53:56 STATE: test-node.js passed: warmup: none default
2021-06-05 11:53:58 STATE: test-node.js passed: warmup: face default
2021-06-05 11:53:58 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
2021-06-05 11:53:58 DATA: test-node.js result: performance: load: 430 total: 1782
2021-06-05 11:54:00 STATE: test-node.js passed: warmup: body default
2021-06-05 11:54:00 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-06-05 11:54:00 DATA: test-node.js result: performance: load: 430 total: 1700
2021-06-05 11:54:00 INFO: test-node.js test body variants
2021-06-05 11:54:01 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-06-05 11:54:02 STATE: test-node.js passed: detect: samples/ai-body.jpg posenet
2021-06-05 11:54:02 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-06-05 11:54:02 DATA: test-node.js result: performance: load: 430 total: 1021
2021-06-05 11:54:03 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-06-05 11:54:03 STATE: test-node.js passed: detect: samples/ai-body.jpg movenet
2021-06-05 11:54:03 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-06-05 11:54:03 DATA: test-node.js result: performance: load: 430 total: 217
2021-06-05 11:54:03 STATE: test-node.js passed: detect: random default
2021-06-05 11:54:03 DATA: test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0,"keypoints":0}
2021-06-05 11:54:03 DATA: test-node.js result: performance: load: 430 total: 186
2021-06-05 11:54:03 INFO: test-node.js test: first instance
2021-06-05 11:54:04 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-06-05 11:54:04 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-06-05 11:54:04 DATA: test-node.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0.78,"keypoints":7}
2021-06-05 11:54:04 DATA: test-node.js result: performance: load: 430 total: 113
2021-06-05 11:54:04 INFO: test-node.js test: second instance
2021-06-05 11:54:04 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-06-05 11:54:05 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-06-05 11:54:05 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
2021-06-05 11:54:05 DATA: test-node.js result: performance: load: 2 total: 1319
2021-06-05 11:54:05 INFO: test-node.js test: concurrent
2021-06-05 11:54:06 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-06-05 11:54:06 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-06-05 11:54:07 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-06-05 11:54:08 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-06-05 11:54:13 STATE: test-node.js passed: detect: samples/ai-face.jpg default
2021-06-05 11:54:13 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-06-05 11:54:13 DATA: test-node.js result: performance: load: 430 total: 5500
2021-06-05 11:54:13 STATE: test-node.js passed: detect: samples/ai-face.jpg default
2021-06-05 11:54:13 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-06-05 11:54:13 DATA: test-node.js result: performance: load: 2 total: 5500
2021-06-05 11:54:13 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-06-05 11:54:13 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-06-05 11:54:13 DATA: test-node.js result: performance: load: 430 total: 5500
2021-06-05 11:54:13 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-06-05 11:54:13 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-06-05 11:54:13 DATA: test-node.js result: performance: load: 2 total: 5500
2021-06-05 11:54:13 INFO: test-node.js test complete: 17471 ms
2021-06-05 11:54:13 INFO: test-node-gpu.js start
2021-06-05 11:54:14 WARN: test-node-gpu.js stderr: 2021-06-05 11:54:14.837663: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2021-06-05 11:54:15 WARN: test-node-gpu.js stderr: 2021-06-05 11:54:15.036299: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2021-06-05 11:54:15 WARN: test-node-gpu.js stderr: 2021-06-05 11:54:15.036330: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
2021-06-05 11:54:15 STATE: test-node-gpu.js passed: create human
2021-06-05 11:54:15 INFO: test-node-gpu.js human version: 2.0.0
2021-06-05 11:54:15 INFO: test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
2021-06-05 11:54:15 INFO: test-node-gpu.js tfjs version: 3.7.0
2021-06-05 11:54:15 STATE: test-node-gpu.js passed: set backend: tensorflow
2021-06-05 11:54:15 STATE: test-node-gpu.js passed: load models
2021-06-05 11:54:15 STATE: test-node-gpu.js result: defined models: 14 loaded models: 6
2021-06-05 11:54:15 STATE: test-node-gpu.js passed: warmup: none default
2021-06-05 11:54:17 STATE: test-node-gpu.js passed: warmup: face default
2021-06-05 11:54:17 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
2021-06-05 11:54:17 DATA: test-node-gpu.js result: performance: load: 334 total: 1776
2021-06-05 11:54:18 STATE: test-node-gpu.js passed: warmup: body default
2021-06-05 11:54:18 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-06-05 11:54:18 DATA: test-node-gpu.js result: performance: load: 334 total: 1596
2021-06-05 11:54:18 INFO: test-node-gpu.js test body variants
2021-06-05 11:54:19 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-06-05 11:54:21 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg posenet
2021-06-05 11:54:21 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-06-05 11:54:21 DATA: test-node-gpu.js result: performance: load: 334 total: 1179
2021-06-05 11:54:21 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-06-05 11:54:22 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg movenet
2021-06-05 11:54:22 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-06-05 11:54:22 DATA: test-node-gpu.js result: performance: load: 334 total: 196
2021-06-05 11:54:23 STATE: test-node-gpu.js passed: detect: random default
2021-06-05 11:54:23 DATA: test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
2021-06-05 11:54:23 DATA: test-node-gpu.js result: performance: load: 334 total: 908
2021-06-05 11:54:23 INFO: test-node-gpu.js test: first instance
2021-06-05 11:54:23 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-06-05 11:54:24 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-06-05 11:54:24 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
2021-06-05 11:54:24 DATA: test-node-gpu.js result: performance: load: 334 total: 1275
2021-06-05 11:54:24 INFO: test-node-gpu.js test: second instance
2021-06-05 11:54:25 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-06-05 11:54:26 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-06-05 11:54:26 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.78,"keypoints":7}
|
||||
2021-06-05 11:54:26 [32mDATA: [39m test-node-gpu.js result: performance: load: 5 total: 1202
|
||||
2021-06-05 11:54:26 [36mINFO: [39m test-node-gpu.js test: concurrent
|
||||
2021-06-05 11:54:26 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||
2021-06-05 11:54:26 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||
2021-06-05 11:54:27 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-06-05 11:54:28 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-06-05 11:54:34 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-face.jpg default
|
||||
2021-06-05 11:54:34 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
|
||||
2021-06-05 11:54:34 [32mDATA: [39m test-node-gpu.js result: performance: load: 334 total: 5393
|
||||
2021-06-05 11:54:34 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-face.jpg default
|
||||
2021-06-05 11:54:34 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
|
||||
2021-06-05 11:54:34 [32mDATA: [39m test-node-gpu.js result: performance: load: 5 total: 5393
|
||||
2021-06-05 11:54:34 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-body.jpg default
|
||||
2021-06-05 11:54:34 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
||||
2021-06-05 11:54:34 [32mDATA: [39m test-node-gpu.js result: performance: load: 334 total: 5393
|
||||
2021-06-05 11:54:34 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-body.jpg default
|
||||
2021-06-05 11:54:34 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
|
||||
2021-06-05 11:54:34 [32mDATA: [39m test-node-gpu.js result: performance: load: 5 total: 5393
|
||||
2021-06-05 11:54:34 [36mINFO: [39m test-node-gpu.js test complete: 18953 ms
|
||||
2021-06-05 11:54:34 [36mINFO: [39m test-node-wasm.js start
|
||||
2021-06-05 11:54:34 [31mERROR:[39m test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
|
||||
2021-06-05 11:54:34 [31mERROR:[39m test-node-wasm.js aborting test
|
||||
2021-06-05 11:54:34 [36mINFO: [39m status: {"passed":46,"failed":1}
|
||||
|
|
|
@@ -1,5 +1,6 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"noEmitOnError": false,
|
||||
"module": "es2020",
|
||||
"target": "es2018",
|
||||
"moduleResolution": "node",
|
||||
|
@@ -18,7 +19,14 @@
|
|||
"skipLibCheck": true,
|
||||
"sourceMap": false,
|
||||
"strictNullChecks": true,
|
||||
"allowJs": true
|
||||
"allowJs": true,
|
||||
"baseUrl": "./",
|
||||
"paths": {
|
||||
"tslib": ["node_modules/tslib/tslib.d.ts"],
|
||||
"@tensorflow/tfjs-node/dist/io/file_system": ["node_modules/@tensorflow/tfjs-node/dist/io/file_system.js"],
|
||||
"@tensorflow/tfjs-core/dist/index": ["node_modules/@tensorflow/tfjs-core/dist/index.js"],
|
||||
"@tensorflow/tfjs-converter/dist/index": ["node_modules/@tensorflow/tfjs-converter/dist/index.js"]
|
||||
}
|
||||
},
|
||||
"formatCodeOptions": { "indentSize": 2, "tabSize": 2 },
|
||||
"include": ["src/*", "src/***/*"],
|
||||
|
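A note on the "paths" block added above: together with "baseUrl", it tells TypeScript how to resolve the deep tfjs imports used by the Node bindings against local node_modules sources. A minimal sketch of an import this mapping is meant to resolve (the consuming file is hypothetical, not part of this commit):

// hypothetical consumer inside src/ — with "baseUrl" and "paths" set as above,
// this deep import resolves to node_modules/@tensorflow/tfjs-node/dist/io/file_system.js
import * as fileSystem from '@tensorflow/tfjs-node/dist/io/file_system';
console.log(typeof fileSystem); // 'object' once the module resolves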
@@ -35,6 +43,6 @@
|
|||
"entryPoints": "src/human.ts",
|
||||
"logLevel": "Info",
|
||||
"logger": "none",
|
||||
"theme": "wiki/theme/",
|
||||
"theme": "wiki/theme/"
|
||||
}
|
||||
}
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@@ -124,6 +124,7 @@
|
|||
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#load" class="tsd-kind-icon">load</a></li>
|
||||
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#match" class="tsd-kind-icon">match</a></li>
|
||||
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#next" class="tsd-kind-icon">next</a></li>
|
||||
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#segmentation-1" class="tsd-kind-icon">segmentation</a></li>
|
||||
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#similarity" class="tsd-kind-icon">similarity</a></li>
|
||||
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#warmup" class="tsd-kind-icon">warmup</a></li>
|
||||
</ul>
|
||||
|
@@ -390,7 +391,7 @@
|
|||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-class">
|
||||
<a name="image" class="tsd-anchor"></a>
|
||||
<h3>image</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">image<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>canvas<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span><span class="tsd-signature-symbol">; </span>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span><span class="tsd-signature-symbol"> }</span></div>
|
||||
<div class="tsd-signature tsd-kind-icon">image<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>canvas<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span><span class="tsd-signature-symbol">; </span>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol"> }</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
<div class="tsd-comment tsd-typography">
|
||||
|
@@ -407,7 +408,7 @@
|
|||
<h5>canvas<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span></h5>
|
||||
<h5>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span></h5>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
@@ -575,7 +576,7 @@
|
|||
<a name="detect" class="tsd-anchor"></a>
|
||||
<h3>detect</h3>
|
||||
<ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class">
|
||||
<li class="tsd-signature tsd-kind-icon">detect<span class="tsd-signature-symbol">(</span>input<span class="tsd-signature-symbol">: </span><a href="../index.html#input" class="tsd-signature-type" data-tsd-kind="Type alias">Input</a>, userConfig<span class="tsd-signature-symbol">?: </span><a href="../interfaces/config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">></span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol"><</span><a href="../index.html#error" class="tsd-signature-type" data-tsd-kind="Type alias">Error</a><span class="tsd-signature-symbol"> | </span><a href="../interfaces/result.html" class="tsd-signature-type" data-tsd-kind="Interface">Result</a><span class="tsd-signature-symbol">></span></li>
|
||||
<li class="tsd-signature tsd-kind-icon">detect<span class="tsd-signature-symbol">(</span>input<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span>, userConfig<span class="tsd-signature-symbol">?: </span><a href="../interfaces/config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">></span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol"><</span><a href="../index.html#error" class="tsd-signature-type" data-tsd-kind="Type alias">Error</a><span class="tsd-signature-symbol"> | </span><a href="../interfaces/result.html" class="tsd-signature-type" data-tsd-kind="Interface">Result</a><span class="tsd-signature-symbol">></span></li>
|
||||
</ul>
|
||||
<ul class="tsd-descriptions">
|
||||
<li class="tsd-description">
|
||||
|
@@ -595,14 +596,14 @@
|
|||
<h4 class="tsd-parameters-title">Parameters</h4>
|
||||
<ul class="tsd-parameters">
|
||||
<li>
|
||||
<h5>input: <a href="../index.html#input" class="tsd-signature-type" data-tsd-kind="Type alias">Input</a></h5>
|
||||
<h5>input: <span class="tsd-signature-type">any</span></h5>
|
||||
</li>
|
||||
<li>
|
||||
<h5><span class="tsd-flag ts-flagOptional">Optional</span> userConfig: <a href="../interfaces/config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">></span></h5>
|
||||
</li>
|
||||
</ul>
|
||||
<h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol"><</span><a href="../index.html#error" class="tsd-signature-type" data-tsd-kind="Type alias">Error</a><span class="tsd-signature-symbol"> | </span><a href="../interfaces/result.html" class="tsd-signature-type" data-tsd-kind="Interface">Result</a><span class="tsd-signature-symbol">></span></h4>
|
||||
<p>result: Result</p>
|
||||
<p>result: <a href="../interfaces/result.html">Result</a></p>
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
|
@@ -610,7 +611,7 @@
|
|||
<a name="enhance" class="tsd-anchor"></a>
|
||||
<h3>enhance</h3>
|
||||
<ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class">
|
||||
<li class="tsd-signature tsd-kind-icon">enhance<span class="tsd-signature-symbol">(</span>input<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span></li>
|
||||
<li class="tsd-signature tsd-kind-icon">enhance<span class="tsd-signature-symbol">(</span>input<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span></li>
|
||||
</ul>
|
||||
<ul class="tsd-descriptions">
|
||||
<li class="tsd-description">
|
||||
|
@@ -624,10 +625,10 @@
|
|||
<h4 class="tsd-parameters-title">Parameters</h4>
|
||||
<ul class="tsd-parameters">
|
||||
<li>
|
||||
<h5>input: <span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span></h5>
|
||||
<h5>input: <span class="tsd-signature-type">any</span></h5>
|
||||
</li>
|
||||
</ul>
|
||||
<h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span></h4>
|
||||
<h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">any</span></h4>
|
||||
<p>Tensor</p>
|
||||
</li>
|
||||
</ul>
|
||||
|
@@ -733,6 +734,37 @@
|
|||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class">
|
||||
<a name="segmentation-1" class="tsd-anchor"></a>
|
||||
<h3>segmentation</h3>
|
||||
<ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class">
|
||||
<li class="tsd-signature tsd-kind-icon">segmentation<span class="tsd-signature-symbol">(</span>input<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span>, background<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span><span class="tsd-signature-symbol">></span></li>
|
||||
</ul>
|
||||
<ul class="tsd-descriptions">
|
||||
<li class="tsd-description">
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
<div class="tsd-comment tsd-typography">
|
||||
<div class="lead">
|
||||
<p>Segmentation method takes any input and returns a processed canvas with body segmentation.
|
||||
Optional parameter background is used to fill the background with a specific input.
|
||||
Segmentation is not triggered as part of the detect process.</p>
|
||||
</div>
|
||||
</div>
|
||||
<h4 class="tsd-parameters-title">Parameters</h4>
|
||||
<ul class="tsd-parameters">
|
||||
<li>
|
||||
<h5>input: <span class="tsd-signature-type">any</span></h5>
|
||||
</li>
|
||||
<li>
|
||||
<h5><span class="tsd-flag ts-flagOptional">Optional</span> background: <span class="tsd-signature-type">any</span></h5>
|
||||
</li>
|
||||
</ul>
|
||||
<h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span><span class="tsd-signature-symbol">></span></h4>
|
||||
<p>Canvas</p>
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class">
|
||||
<a name="similarity" class="tsd-anchor"></a>
|
||||
<h3>similarity</h3>
|
||||
|
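To make the documented contract concrete, here is a minimal usage sketch of the new segmentation method, assuming the package default export and browser inputs (the element ids and config literal are hypothetical):

import Human from '@vladmandic/human';

const human = new Human({ segmentation: { enabled: true } });

async function replaceBackground(): Promise<void> {
  // hypothetical DOM elements; any documented Input type (video, image, canvas, tensor) should work
  const webcam = document.getElementById('webcam') as HTMLVideoElement;
  const scenery = document.getElementById('scenery') as HTMLImageElement;
  // segmentation runs standalone and is not triggered as part of detect()
  const output = await human.segmentation(webcam, scenery);
  document.body.appendChild(output as HTMLCanvasElement);
}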
@@ -868,6 +900,9 @@
|
|||
<li class=" tsd-kind-method tsd-parent-kind-class">
|
||||
<a href="human.html#next" class="tsd-kind-icon">next</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-method tsd-parent-kind-class">
|
||||
<a href="human.html#segmentation-1" class="tsd-kind-icon">segmentation</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-method tsd-parent-kind-class">
|
||||
<a href="human.html#similarity" class="tsd-kind-icon">similarity</a>
|
||||
</li>
|
||||
|
@@ -896,6 +931,9 @@
|
|||
<li class=" tsd-kind-interface">
|
||||
<a href="../interfaces/item.html" class="tsd-kind-icon">Item</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="../interfaces/person.html" class="tsd-kind-icon">Person</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="../interfaces/result.html" class="tsd-kind-icon">Result</a>
|
||||
</li>
|
||||
|
|
|
@@ -80,6 +80,7 @@
|
|||
<li class="tsd-kind-interface"><a href="interfaces/face.html" class="tsd-kind-icon">Face</a></li>
|
||||
<li class="tsd-kind-interface"><a href="interfaces/hand.html" class="tsd-kind-icon">Hand</a></li>
|
||||
<li class="tsd-kind-interface"><a href="interfaces/item.html" class="tsd-kind-icon">Item</a></li>
|
||||
<li class="tsd-kind-interface"><a href="interfaces/person.html" class="tsd-kind-icon">Person</a></li>
|
||||
<li class="tsd-kind-interface"><a href="interfaces/result.html" class="tsd-kind-icon">Result</a></li>
|
||||
</ul>
|
||||
</section>
|
||||
|
@@ -135,12 +136,6 @@
|
|||
<div class="lead">
|
||||
<p>Gesture results</p>
|
||||
</div>
|
||||
<p>Array of individual results with one object per detected gesture
|
||||
Each result has:</p>
|
||||
<ul>
|
||||
<li>part: part name and number where gesture was detected: face, iris, body, hand</li>
|
||||
<li>gesture: gesture detected</li>
|
||||
</ul>
|
||||
</div>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-type-alias">
|
||||
|
@@ -203,6 +198,9 @@
|
|||
<li class=" tsd-kind-interface">
|
||||
<a href="interfaces/item.html" class="tsd-kind-icon">Item</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="interfaces/person.html" class="tsd-kind-icon">Person</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="interfaces/result.html" class="tsd-kind-icon">Result</a>
|
||||
</li>
|
||||
|
|
|
@@ -201,6 +201,9 @@
|
|||
<li class=" tsd-kind-interface">
|
||||
<a href="item.html" class="tsd-kind-icon">Item</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="person.html" class="tsd-kind-icon">Person</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="result.html" class="tsd-kind-icon">Result</a>
|
||||
</li>
|
||||
|
|
|
@@ -779,6 +779,9 @@
|
|||
<li class=" tsd-kind-interface">
|
||||
<a href="item.html" class="tsd-kind-icon">Item</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="person.html" class="tsd-kind-icon">Person</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="result.html" class="tsd-kind-icon">Result</a>
|
||||
</li>
|
||||
|
|
|
@@ -338,6 +338,9 @@
|
|||
<li class=" tsd-kind-interface">
|
||||
<a href="item.html" class="tsd-kind-icon">Item</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="person.html" class="tsd-kind-icon">Person</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="result.html" class="tsd-kind-icon">Result</a>
|
||||
</li>
|
||||
|
|
|
@@ -216,7 +216,7 @@
|
|||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="image" class="tsd-anchor"></a>
|
||||
<h3><span class="tsd-flag ts-flagOptional">Optional</span> image</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">image<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span></div>
|
||||
<div class="tsd-signature tsd-kind-icon">image<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">any</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
</section>
|
||||
|
@ -291,7 +291,7 @@
|
|||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="tensor" class="tsd-anchor"></a>
|
||||
<h3>tensor</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">tensor<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span></div>
|
||||
<div class="tsd-signature tsd-kind-icon">tensor<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">any</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
</section>
|
||||
|
@ -391,6 +391,9 @@
|
|||
<li class=" tsd-kind-interface">
|
||||
<a href="item.html" class="tsd-kind-icon">Item</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="person.html" class="tsd-kind-icon">Person</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="result.html" class="tsd-kind-icon">Result</a>
|
||||
</li>
|
||||
|
|
|
@@ -209,6 +209,9 @@
|
|||
<li class=" tsd-kind-interface">
|
||||
<a href="item.html" class="tsd-kind-icon">Item</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="person.html" class="tsd-kind-icon">Person</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="result.html" class="tsd-kind-icon">Result</a>
|
||||
</li>
|
||||
|
|
|
@@ -211,6 +211,9 @@
|
|||
</li>
|
||||
</ul>
|
||||
<ul class="after-current">
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="person.html" class="tsd-kind-icon">Person</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="result.html" class="tsd-kind-icon">Result</a>
|
||||
</li>
|
||||
|
|
|
@@ -0,0 +1,281 @@
|
|||
<!doctype html>
|
||||
<html class="default no-js">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<title>Person | @vladmandic/human</title>
|
||||
<meta name="description" content="Documentation for @vladmandic/human">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<link rel="stylesheet" href="../assets/css/main.css">
|
||||
<script async src="../assets/js/search.js" id="search-script"></script>
|
||||
</head>
|
||||
<body>
|
||||
<header>
|
||||
<div class="tsd-page-toolbar">
|
||||
<div class="container">
|
||||
<div class="table-wrap">
|
||||
<div class="table-cell" id="tsd-search" data-index="../assets/js/search.json" data-base="..">
|
||||
<div class="field">
|
||||
<label for="tsd-search-field" class="tsd-widget search no-caption">Search</label>
|
||||
<input id="tsd-search-field" type="text" />
|
||||
</div>
|
||||
<ul class="results">
|
||||
<li class="state loading">Preparing search index...</li>
|
||||
<li class="state failure">The search index is not available</li>
|
||||
</ul>
|
||||
<a href="../index.html" class="title">@vladmandic/human</a>
|
||||
</div>
|
||||
<div class="table-cell" id="tsd-widgets">
|
||||
<div id="tsd-filter">
|
||||
<a href="#" class="tsd-widget options no-caption" data-toggle="options">Options</a>
|
||||
<div class="tsd-filter-group">
|
||||
<div class="tsd-select" id="tsd-filter-visibility">
|
||||
<span class="tsd-select-label">All</span>
|
||||
<ul class="tsd-select-list">
|
||||
<li data-value="public">Public</li>
|
||||
<li data-value="protected">Public/Protected</li>
|
||||
<li data-value="private" class="selected">All</li>
|
||||
</ul>
|
||||
</div>
|
||||
<input type="checkbox" id="tsd-filter-inherited" checked />
|
||||
<label class="tsd-widget" for="tsd-filter-inherited">Inherited</label>
|
||||
</div>
|
||||
</div>
|
||||
<a href="#" class="tsd-widget menu no-caption" data-toggle="menu">Menu</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="tsd-page-title">
|
||||
<div class="container">
|
||||
<ul class="tsd-breadcrumb">
|
||||
<li>
|
||||
<a href="../index.html">@vladmandic/human</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="person.html">Person</a>
|
||||
</li>
|
||||
</ul>
|
||||
<h1>Interface Person</h1>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
<div class="container container-main">
|
||||
<div class="row">
|
||||
<div class="col-8 col-content">
|
||||
<section class="tsd-panel tsd-comment">
|
||||
<div class="tsd-comment tsd-typography">
|
||||
<div class="lead">
|
||||
<p>Person getter</p>
|
||||
</div>
|
||||
<dl class="tsd-comment-tags">
|
||||
<dt>interface</dt>
|
||||
<dd><p>Person Interface</p>
|
||||
<p>Each result has:</p>
|
||||
<ul>
|
||||
<li>id: person id</li>
|
||||
<li>face: face object</li>
|
||||
<li>body: body object</li>
|
||||
<li>hands: array of hand objects</li>
|
||||
<li>gestures: array of gestures</li>
|
||||
<li>box: bounding box: x, y, width, height normalized to input image resolution</li>
|
||||
<li>boxRaw: bounding box: x, y, width, height normalized to 0..1</li>
|
||||
</ul>
|
||||
</dd>
|
||||
</dl>
|
||||
</div>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-hierarchy">
|
||||
<h3>Hierarchy</h3>
|
||||
<ul class="tsd-hierarchy">
|
||||
<li>
|
||||
<span class="target">Person</span>
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
<section class="tsd-panel-group tsd-index-group">
|
||||
<h2>Index</h2>
|
||||
<section class="tsd-panel tsd-index-panel">
|
||||
<div class="tsd-index-content">
|
||||
<section class="tsd-index-section ">
|
||||
<h3>Properties</h3>
|
||||
<ul class="tsd-index-list">
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="person.html#body" class="tsd-kind-icon">body</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="person.html#box" class="tsd-kind-icon">box</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="person.html#boxraw" class="tsd-kind-icon">box<wbr>Raw</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="person.html#face" class="tsd-kind-icon">face</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="person.html#gestures" class="tsd-kind-icon">gestures</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="person.html#hands" class="tsd-kind-icon">hands</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="person.html#id" class="tsd-kind-icon">id</a></li>
|
||||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
</section>
|
||||
</section>
|
||||
<section class="tsd-panel-group tsd-member-group ">
|
||||
<h2>Properties</h2>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="body" class="tsd-anchor"></a>
|
||||
<h3>body</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">body<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="body.html" class="tsd-signature-type" data-tsd-kind="Interface">Body</a></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="box" class="tsd-anchor"></a>
|
||||
<h3>box</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">box<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">[</span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">]</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="boxraw" class="tsd-anchor"></a>
|
||||
<h3><span class="tsd-flag ts-flagOptional">Optional</span> box<wbr>Raw</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">box<wbr>Raw<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">[</span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">]</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="face" class="tsd-anchor"></a>
|
||||
<h3>face</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">face<span class="tsd-signature-symbol">:</span> <a href="face.html" class="tsd-signature-type" data-tsd-kind="Interface">Face</a></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="gestures" class="tsd-anchor"></a>
|
||||
<h3>gestures</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">gestures<span class="tsd-signature-symbol">:</span> <a href="../index.html#gesture" class="tsd-signature-type" data-tsd-kind="Type alias">Gesture</a><span class="tsd-signature-symbol">[]</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="hands" class="tsd-anchor"></a>
|
||||
<h3>hands</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">hands<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>left<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="hand.html" class="tsd-signature-type" data-tsd-kind="Interface">Hand</a><span class="tsd-signature-symbol">; </span>right<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="hand.html" class="tsd-signature-type" data-tsd-kind="Interface">Hand</a><span class="tsd-signature-symbol"> }</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
<div class="tsd-type-declaration">
|
||||
<h4>Type declaration</h4>
|
||||
<ul class="tsd-parameters">
|
||||
<li class="tsd-parameter">
|
||||
<h5>left<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="hand.html" class="tsd-signature-type" data-tsd-kind="Interface">Hand</a></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>right<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><a href="hand.html" class="tsd-signature-type" data-tsd-kind="Interface">Hand</a></h5>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="id" class="tsd-anchor"></a>
|
||||
<h3>id</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">id<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
</section>
|
||||
</section>
|
||||
</div>
|
||||
<div class="col-4 col-menu menu-sticky-wrap menu-highlight">
|
||||
<nav class="tsd-navigation primary">
|
||||
<ul>
|
||||
<li class=" ">
|
||||
<a href="../index.html">Exports</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
<nav class="tsd-navigation secondary menu-sticky">
|
||||
<ul class="before-current">
|
||||
<li class=" tsd-kind-reference">
|
||||
<a href="../index.html#default" class="tsd-kind-icon">default</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-class">
|
||||
<a href="../classes/human.html" class="tsd-kind-icon">Human</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="body.html" class="tsd-kind-icon">Body</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="config.html" class="tsd-kind-icon">Config</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="drawoptions.html" class="tsd-kind-icon">Draw<wbr>Options</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="face.html" class="tsd-kind-icon">Face</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="hand.html" class="tsd-kind-icon">Hand</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="item.html" class="tsd-kind-icon">Item</a>
|
||||
</li>
|
||||
</ul>
|
||||
<ul class="current">
|
||||
<li class="current tsd-kind-interface">
|
||||
<a href="person.html" class="tsd-kind-icon">Person</a>
|
||||
<ul>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="person.html#body" class="tsd-kind-icon">body</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="person.html#box" class="tsd-kind-icon">box</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="person.html#boxraw" class="tsd-kind-icon">box<wbr>Raw</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="person.html#face" class="tsd-kind-icon">face</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="person.html#gestures" class="tsd-kind-icon">gestures</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="person.html#hands" class="tsd-kind-icon">hands</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="person.html#id" class="tsd-kind-icon">id</a>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
<ul class="after-current">
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="result.html" class="tsd-kind-icon">Result</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-type-alias">
|
||||
<a href="../index.html#error" class="tsd-kind-icon">Error</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-type-alias">
|
||||
<a href="../index.html#gesture" class="tsd-kind-icon">Gesture</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-type-alias">
|
||||
<a href="../index.html#input" class="tsd-kind-icon">Input</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-type-alias">
|
||||
<a href="../index.html#tensorflow" class="tsd-kind-icon">Tensor<wbr>Flow</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<footer>
|
||||
<div class="container">
|
||||
<h2>Legend</h2>
|
||||
<div class="tsd-legend-group">
|
||||
<ul class="tsd-legend">
|
||||
<li class="tsd-kind-constructor tsd-parent-kind-class"><span class="tsd-kind-icon">Constructor</span></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-class"><span class="tsd-kind-icon">Property</span></li>
|
||||
<li class="tsd-kind-method tsd-parent-kind-class"><span class="tsd-kind-icon">Method</span></li>
|
||||
</ul>
|
||||
<ul class="tsd-legend">
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><span class="tsd-kind-icon">Property</span></li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</footer>
|
||||
<div class="overlay"></div>
|
||||
<script src="../assets/js/main.js"></script>
|
||||
</body>
|
||||
</html>
|
|
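The new Person interface documented above is easiest to read through a short, hedged sketch that walks the persons getter on a detection result (the input element and cast are illustrative):

import Human, { Person, Result } from '@vladmandic/human';

const human = new Human();

async function listPersons(input: HTMLVideoElement): Promise<void> {
  const result = await human.detect(input) as Result;
  // persons groups face, body, hands and gestures that belong to the same individual
  for (const person of result.persons as Person[]) {
    const [x, y, width, height] = person.box; // normalized to input image resolution
    console.log(person.id, { x, y, width, height }, person.gestures.length);
  }
}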
@@ -189,7 +189,7 @@
|
|||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="persons" class="tsd-anchor"></a>
|
||||
<h3>persons</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">persons<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Person</span><span class="tsd-signature-symbol">[]</span></div>
|
||||
<div class="tsd-signature tsd-kind-icon">persons<span class="tsd-signature-symbol">:</span> <a href="person.html" class="tsd-signature-type" data-tsd-kind="Interface">Person</a><span class="tsd-signature-symbol">[]</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
<div class="tsd-comment tsd-typography">
|
||||
|
@@ -246,6 +246,9 @@
|
|||
<li class=" tsd-kind-interface">
|
||||
<a href="item.html" class="tsd-kind-icon">Item</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-interface">
|
||||
<a href="person.html" class="tsd-kind-icon">Person</a>
|
||||
</li>
|
||||
</ul>
|
||||
<ul class="current">
|
||||
<li class="current tsd-kind-interface">
|
||||
|
|
|
@@ -8,14 +8,14 @@ import * as facemesh from './blazeface/facemesh';
|
|||
import * as draw from './draw/draw';
|
||||
import { Tensor } from './tfjs/types';
|
||||
export type { Config } from './config';
|
||||
export type { Result, Face, Hand, Body, Item, Gesture } from './result';
|
||||
export type { Result, Face, Hand, Body, Item, Gesture, Person } from './result';
|
||||
export type { DrawOptions } from './draw/draw';
|
||||
/** Defines all possible input types for **Human** detection
|
||||
* @typedef Input
|
||||
* @typedef Input Type
|
||||
*/
|
||||
export declare type Input = Tensor | typeof Image | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
|
||||
/** Error message
|
||||
* @typedef Error
|
||||
* @typedef Error Type
|
||||
*/
|
||||
export declare type Error = {
|
||||
error: string;
|
||||
|
@@ -115,11 +115,22 @@ export declare class Human {
|
|||
analyze: (...msg: any[]) => void;
|
||||
/** Similarity method calculates similarity between two provided face descriptors (face embeddings)
|
||||
* - Calculation is based on normalized Minkowski distance between the two descriptors
|
||||
*
|
||||
* @param embedding1: face descriptor as array of numbers
|
||||
* @param embedding2: face descriptor as array of numbers
|
||||
* @returns similarity: number
|
||||
*/
|
||||
similarity(embedding1: Array<number>, embedding2: Array<number>): number;
|
||||
/**
|
||||
* Segmentation method takes any input and returns a processed canvas with body segmentation
|
||||
* Optional parameter background is used to fill the background with a specific input
|
||||
* Segmentation is not triggered as part of the detect process
|
||||
*
|
||||
* @param input: {@link Input}
|
||||
* @param background?: {@link Input}
|
||||
* @returns Canvas
|
||||
*/
|
||||
segmentation(input: Input, background?: Input): Promise<OffscreenCanvas | HTMLCanvasElement>;
|
||||
/** Enhance method performs additional enhancements to a previously detected face image for further processing
|
||||
* @param input: Tensor as provided in human.result.face[n].tensor
|
||||
* @returns Tensor
|
||||
|
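As a quick illustration of the similarity contract declared above (normalized Minkowski distance over two descriptors), a hedged sketch comparing faces from two detection results; the sample inputs are hypothetical and the embedding access assumes the face result carries its descriptor array:

import Human, { Result } from '@vladmandic/human';

const human = new Human();

async function compareFaces(a: HTMLImageElement, b: HTMLImageElement): Promise<number> {
  const first = await human.detect(a) as Result;
  const second = await human.detect(b) as Result;
  if (!first.face.length || !second.face.length) return 0;
  // each face result carries its descriptor (embedding) as an array of numbers
  return human.similarity(first.face[0].embedding as Array<number>, second.face[0].embedding as Array<number>);
}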
@@ -149,7 +160,8 @@ export declare class Human {
|
|||
/**
|
||||
* Runs interpolation using the last known result and returns a smoothed result
|
||||
* Interpolation is based on time since the last known result so it can be called independently
|
||||
* @param result?: use specific result set to run interpolation on
|
||||
*
|
||||
* @param result?: {@link Result} optional use specific result set to run interpolation on
|
||||
* @returns result: {@link Result}
|
||||
*/
|
||||
next: (result?: Result | undefined) => Result;
|
||||
|
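Because next interpolates from the last known result based on elapsed time, it can drive a render loop that runs faster than detection itself; a minimal sketch (the draw call follows the library's demo pattern; names are illustrative):

import Human from '@vladmandic/human';

const human = new Human();

function renderLoop(canvas: HTMLCanvasElement): void {
  const interpolated = human.next(); // smoothed from the last known result
  human.draw.all(canvas, interpolated);
  requestAnimationFrame(() => renderLoop(canvas));
}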
@@ -158,9 +170,10 @@ export declare class Human {
|
|||
* - Pre-process input: {@link Input}
|
||||
* - Run inference for all configured models
|
||||
* - Process and return result: {@link Result}
|
||||
*
|
||||
* @param input: Input
|
||||
* @param userConfig?: Config
|
||||
* @returns result: Result
|
||||
* @param userConfig?: {@link Config}
|
||||
* @returns result: {@link Result}
|
||||
*/
|
||||
detect(input: Input, userConfig?: Config | Record<string, unknown>): Promise<Result | Error>;
|
||||
/** Warmup method pre-initializes all models for faster inference
|
||||
|
|
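The detect declaration above returns a union of Result and Error, so callers should branch on the error field before reading results; a hedged sketch (input element and config literal are hypothetical):

import Human, { Result } from '@vladmandic/human';

const human = new Human();

async function run(input: HTMLVideoElement): Promise<void> {
  const res = await human.detect(input, { face: { enabled: true } });
  // the Error branch of the union carries a single error string
  if ((res as { error?: string }).error) console.error((res as { error: string }).error);
  else console.log('faces detected:', (res as Result).face.length);
}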
|
@ -2,7 +2,10 @@
|
|||
* Image Processing module used by Human
|
||||
*/
|
||||
import { Tensor } from '../tfjs/types';
|
||||
export declare function process(input: any, config: any): {
|
||||
import { Config } from '../config';
|
||||
declare type Input = Tensor | typeof Image | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
|
||||
export declare function process(input: Input, config: Config): {
|
||||
tensor: Tensor | null;
|
||||
canvas: OffscreenCanvas | HTMLCanvasElement;
|
||||
};
|
||||
export {};
|
||||
|
|
|
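The process declaration above pairs a tensor with a canvas; a small type-level sketch of consuming that shape (the helper function is hypothetical):

import { Tensor } from '@tensorflow/tfjs-core';

// shape of the value returned by the image-processing step, as declared above
type Processed = { tensor: Tensor | null; canvas: OffscreenCanvas | HTMLCanvasElement };

function logProcessed(p: Processed): void {
  if (p.tensor) console.log('preprocessed tensor shape:', p.tensor.shape);
  // the canvas holds the resized/filtered input for display or further processing
}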
@@ -128,6 +128,7 @@ export interface Item {
|
|||
boxRaw: [number, number, number, number];
|
||||
}
|
||||
/** Gesture results
|
||||
* @typedef Gesture Type
|
||||
*
|
||||
* Array of individual results with one object per detected gesture
|
||||
* Each result has:
|
||||
|
@@ -148,6 +149,7 @@ export declare type Gesture = {
|
|||
gesture: string;
|
||||
};
|
||||
/** Person getter
|
||||
* @interface Person Interface
|
||||
*
|
||||
* Each result has:
|
||||
* - id: person id
|
||||
|
|
|
@ -3,9 +3,11 @@
|
|||
*/
|
||||
import { GraphModel, Tensor } from '../tfjs/types';
|
||||
import { Config } from '../config';
|
||||
export declare type Segmentation = boolean;
|
||||
declare type Input = Tensor | typeof Image | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
|
||||
export declare function load(config: Config): Promise<GraphModel>;
|
||||
export declare function predict(input: {
|
||||
tensor: Tensor | null;
|
||||
canvas: OffscreenCanvas | HTMLCanvasElement;
|
||||
}, config: Config): Promise<Segmentation>;
|
||||
}, config: Config): Promise<Uint8ClampedArray | null>;
|
||||
export declare function process(input: Input, background: Input | undefined, config: Config): Promise<HTMLCanvasElement | OffscreenCanvas>;
|
||||
export {};
|
||||
|
|
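Since predict above now resolves to a Uint8ClampedArray rather than a boolean — plausibly a per-pixel opacity map — here is a hedged sketch of applying such a map as a canvas alpha channel (assumes one map value per pixel, matching canvas dimensions):

function applyAlphaMap(canvas: HTMLCanvasElement, map: Uint8ClampedArray): void {
  const ctx = canvas.getContext('2d');
  if (!ctx) return;
  const pixels = ctx.getImageData(0, 0, canvas.width, canvas.height);
  for (let i = 0; i < map.length; i++) pixels.data[4 * i + 3] = map[i]; // write alpha channel
  ctx.putImageData(pixels, 0, 0);
}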