mirror of https://github.com/vladmandic/human
enhanced automated test coverage
parent
cadbadf52e
commit
66428abc12
|
@ -9,8 +9,9 @@
|
|||
|
||||
## Changelog
|
||||
|
||||
### **HEAD -> main** 2021/09/18 mandic00@live.com
|
||||
### **HEAD -> main** 2021/09/19 mandic00@live.com
|
||||
|
||||
- added configuration validation
|
||||
- prevent validation failed on some model combinations
|
||||
- webgl exception handling
|
||||
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@ -4462,8 +4462,11 @@ function canvas(width, height) {
|
|||
}
|
||||
function process2(input, config3) {
|
||||
let tensor3;
|
||||
if (!input)
|
||||
throw new Error("input is missing");
|
||||
if (!input) {
|
||||
if (config3.debug)
|
||||
log("input is missing");
|
||||
return { tensor: null, canvas: null };
|
||||
}
|
||||
if (!(input instanceof tfjs_esm_exports.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env.Canvas !== "undefined" && input instanceof env.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) {
|
||||
throw new Error("input type is not recognized");
|
||||
}
|
||||
|
@ -4474,12 +4477,14 @@ function process2(input, config3) {
|
|||
throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
|
||||
} else {
|
||||
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
|
||||
if (config3.debug)
|
||||
log("input stream is not ready");
|
||||
return { tensor: null, canvas: inCanvas };
|
||||
}
|
||||
const originalWidth = input["naturalWidth"] || input["videoWidth"] || input["width"] || input["shape"] && input["shape"][1] > 0;
|
||||
const originalHeight = input["naturalHeight"] || input["videoHeight"] || input["height"] || input["shape"] && input["shape"][2] > 0;
|
||||
if (!originalWidth || !originalHeight) {
|
||||
if (config3.debug)
|
||||
log("cannot determine input dimensions");
|
||||
return { tensor: null, canvas: inCanvas };
|
||||
}
|
||||
|
@ -10061,22 +10066,16 @@ async function predict11(input) {
|
|||
tfjs_esm_exports.dispose(resizeOutput);
|
||||
return data;
|
||||
}
|
||||
const overlay = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
overlay.width = width;
|
||||
overlay.height = height;
|
||||
const overlay = canvas(width, height);
|
||||
if (tfjs_esm_exports.browser)
|
||||
await tfjs_esm_exports.browser.toPixels(resizeOutput, overlay);
|
||||
tfjs_esm_exports.dispose(resizeOutput);
|
||||
const alphaCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
alphaCanvas.width = width;
|
||||
alphaCanvas.height = height;
|
||||
const alphaCanvas = canvas(width, height);
|
||||
const ctxAlpha = alphaCanvas.getContext("2d");
|
||||
ctxAlpha.filter = "blur(8px";
|
||||
await ctxAlpha.drawImage(overlay, 0, 0);
|
||||
const alpha = ctxAlpha.getImageData(0, 0, width, height).data;
|
||||
const original = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
original.width = width;
|
||||
original.height = height;
|
||||
const original = canvas(width, height);
|
||||
const ctx = original.getContext("2d");
|
||||
if (input.canvas)
|
||||
await ctx.drawImage(input.canvas, 0, 0);
|
||||
|
@ -10096,17 +10095,20 @@ async function process5(input, background, config3) {
|
|||
if (!model9)
|
||||
await load12(config3);
|
||||
const img = process2(input, config3);
|
||||
const tmp = process2(background, config3);
|
||||
if (!img.canvas || !tmp.canvas) {
|
||||
if (config3.debug)
|
||||
log("segmentation cannot process input or background");
|
||||
return null;
|
||||
}
|
||||
const alpha = await predict11(img);
|
||||
tfjs_esm_exports.dispose(img.tensor);
|
||||
if (background && alpha) {
|
||||
const tmp = process2(background, config3);
|
||||
const bg = tmp.canvas;
|
||||
tfjs_esm_exports.dispose(tmp.tensor);
|
||||
const fg = img.canvas;
|
||||
const fgData = (_a = fg.getContext("2d")) == null ? void 0 : _a.getImageData(0, 0, fg.width, fg.height).data;
|
||||
const c = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(fg.width, fg.height) : document.createElement("canvas");
|
||||
c.width = fg.width;
|
||||
c.height = fg.height;
|
||||
const c = canvas(fg.width, fg.height);
|
||||
const ctx = c.getContext("2d");
|
||||
ctx.globalCompositeOperation = "copy";
|
||||
ctx.drawImage(bg, 0, 0, c.width, c.height);
|
||||
|
@ -12388,6 +12390,7 @@ var Human = class {
|
|||
this.analyze("End Segmentation:");
|
||||
}
|
||||
if (!img.tensor) {
|
||||
if (this.config.debug)
|
||||
log("could not convert input to tensor");
|
||||
resolve({ error: "could not convert input to tensor" });
|
||||
return;
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@ -64519,8 +64519,11 @@ function canvas(width, height) {
|
|||
}
|
||||
function process2(input2, config3) {
|
||||
let tensor2;
|
||||
if (!input2)
|
||||
throw new Error("input is missing");
|
||||
if (!input2) {
|
||||
if (config3.debug)
|
||||
log("input is missing");
|
||||
return { tensor: null, canvas: null };
|
||||
}
|
||||
if (!(input2 instanceof Tensor) && !(typeof Image !== "undefined" && input2 instanceof Image) && !(typeof env2.Canvas !== "undefined" && input2 instanceof env2.Canvas) && !(typeof ImageData !== "undefined" && input2 instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input2 instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input2 instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input2 instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input2 instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input2 instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input2 instanceof OffscreenCanvas)) {
|
||||
throw new Error("input type is not recognized");
|
||||
}
|
||||
|
@ -64531,12 +64534,14 @@ function process2(input2, config3) {
|
|||
throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input2.shape}`);
|
||||
} else {
|
||||
if (typeof input2["readyState"] !== "undefined" && input2["readyState"] <= 2) {
|
||||
if (config3.debug)
|
||||
log("input stream is not ready");
|
||||
return { tensor: null, canvas: inCanvas };
|
||||
}
|
||||
const originalWidth = input2["naturalWidth"] || input2["videoWidth"] || input2["width"] || input2["shape"] && input2["shape"][1] > 0;
|
||||
const originalHeight = input2["naturalHeight"] || input2["videoHeight"] || input2["height"] || input2["shape"] && input2["shape"][2] > 0;
|
||||
if (!originalWidth || !originalHeight) {
|
||||
if (config3.debug)
|
||||
log("cannot determine input dimensions");
|
||||
return { tensor: null, canvas: inCanvas };
|
||||
}
|
||||
|
@ -70118,22 +70123,16 @@ async function predict11(input2) {
|
|||
dispose(resizeOutput);
|
||||
return data;
|
||||
}
|
||||
const overlay = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
overlay.width = width;
|
||||
overlay.height = height;
|
||||
const overlay = canvas(width, height);
|
||||
if (browser_exports)
|
||||
await browser_exports.toPixels(resizeOutput, overlay);
|
||||
dispose(resizeOutput);
|
||||
const alphaCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
alphaCanvas.width = width;
|
||||
alphaCanvas.height = height;
|
||||
const alphaCanvas = canvas(width, height);
|
||||
const ctxAlpha = alphaCanvas.getContext("2d");
|
||||
ctxAlpha.filter = "blur(8px";
|
||||
await ctxAlpha.drawImage(overlay, 0, 0);
|
||||
const alpha = ctxAlpha.getImageData(0, 0, width, height).data;
|
||||
const original = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
original.width = width;
|
||||
original.height = height;
|
||||
const original = canvas(width, height);
|
||||
const ctx = original.getContext("2d");
|
||||
if (input2.canvas)
|
||||
await ctx.drawImage(input2.canvas, 0, 0);
|
||||
|
@ -70153,17 +70152,20 @@ async function process5(input2, background, config3) {
|
|||
if (!model10)
|
||||
await load12(config3);
|
||||
const img = process2(input2, config3);
|
||||
const tmp = process2(background, config3);
|
||||
if (!img.canvas || !tmp.canvas) {
|
||||
if (config3.debug)
|
||||
log("segmentation cannot process input or background");
|
||||
return null;
|
||||
}
|
||||
const alpha = await predict11(img);
|
||||
dispose(img.tensor);
|
||||
if (background && alpha) {
|
||||
const tmp = process2(background, config3);
|
||||
const bg = tmp.canvas;
|
||||
dispose(tmp.tensor);
|
||||
const fg = img.canvas;
|
||||
const fgData = (_a = fg.getContext("2d")) == null ? void 0 : _a.getImageData(0, 0, fg.width, fg.height).data;
|
||||
const c = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(fg.width, fg.height) : document.createElement("canvas");
|
||||
c.width = fg.width;
|
||||
c.height = fg.height;
|
||||
const c = canvas(fg.width, fg.height);
|
||||
const ctx = c.getContext("2d");
|
||||
ctx.globalCompositeOperation = "copy";
|
||||
ctx.drawImage(bg, 0, 0, c.width, c.height);
|
||||
|
@ -72445,6 +72447,7 @@ var Human = class {
|
|||
this.analyze("End Segmentation:");
|
||||
}
|
||||
if (!img.tensor) {
|
||||
if (this.config.debug)
|
||||
log("could not convert input to tensor");
|
||||
resolve({ error: "could not convert input to tensor" });
|
||||
return;
|
||||
|
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -4496,8 +4496,11 @@ function canvas(width, height) {
|
|||
}
|
||||
function process2(input, config3) {
|
||||
let tensor3;
|
||||
if (!input)
|
||||
throw new Error("input is missing");
|
||||
if (!input) {
|
||||
if (config3.debug)
|
||||
log("input is missing");
|
||||
return { tensor: null, canvas: null };
|
||||
}
|
||||
if (!(input instanceof tf3.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env.Canvas !== "undefined" && input instanceof env.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) {
|
||||
throw new Error("input type is not recognized");
|
||||
}
|
||||
|
@ -4508,12 +4511,14 @@ function process2(input, config3) {
|
|||
throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
|
||||
} else {
|
||||
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
|
||||
if (config3.debug)
|
||||
log("input stream is not ready");
|
||||
return { tensor: null, canvas: inCanvas };
|
||||
}
|
||||
const originalWidth = input["naturalWidth"] || input["videoWidth"] || input["width"] || input["shape"] && input["shape"][1] > 0;
|
||||
const originalHeight = input["naturalHeight"] || input["videoHeight"] || input["height"] || input["shape"] && input["shape"][2] > 0;
|
||||
if (!originalWidth || !originalHeight) {
|
||||
if (config3.debug)
|
||||
log("cannot determine input dimensions");
|
||||
return { tensor: null, canvas: inCanvas };
|
||||
}
|
||||
|
@ -10120,22 +10125,16 @@ async function predict11(input) {
|
|||
tf19.dispose(resizeOutput);
|
||||
return data;
|
||||
}
|
||||
const overlay = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
overlay.width = width;
|
||||
overlay.height = height;
|
||||
const overlay = canvas(width, height);
|
||||
if (tf19.browser)
|
||||
await tf19.browser.toPixels(resizeOutput, overlay);
|
||||
tf19.dispose(resizeOutput);
|
||||
const alphaCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
alphaCanvas.width = width;
|
||||
alphaCanvas.height = height;
|
||||
const alphaCanvas = canvas(width, height);
|
||||
const ctxAlpha = alphaCanvas.getContext("2d");
|
||||
ctxAlpha.filter = "blur(8px";
|
||||
await ctxAlpha.drawImage(overlay, 0, 0);
|
||||
const alpha = ctxAlpha.getImageData(0, 0, width, height).data;
|
||||
const original = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
original.width = width;
|
||||
original.height = height;
|
||||
const original = canvas(width, height);
|
||||
const ctx = original.getContext("2d");
|
||||
if (input.canvas)
|
||||
await ctx.drawImage(input.canvas, 0, 0);
|
||||
|
@ -10155,17 +10154,20 @@ async function process5(input, background, config3) {
|
|||
if (!model9)
|
||||
await load12(config3);
|
||||
const img = process2(input, config3);
|
||||
const tmp = process2(background, config3);
|
||||
if (!img.canvas || !tmp.canvas) {
|
||||
if (config3.debug)
|
||||
log("segmentation cannot process input or background");
|
||||
return null;
|
||||
}
|
||||
const alpha = await predict11(img);
|
||||
tf19.dispose(img.tensor);
|
||||
if (background && alpha) {
|
||||
const tmp = process2(background, config3);
|
||||
const bg = tmp.canvas;
|
||||
tf19.dispose(tmp.tensor);
|
||||
const fg = img.canvas;
|
||||
const fgData = (_a = fg.getContext("2d")) == null ? void 0 : _a.getImageData(0, 0, fg.width, fg.height).data;
|
||||
const c = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(fg.width, fg.height) : document.createElement("canvas");
|
||||
c.width = fg.width;
|
||||
c.height = fg.height;
|
||||
const c = canvas(fg.width, fg.height);
|
||||
const ctx = c.getContext("2d");
|
||||
ctx.globalCompositeOperation = "copy";
|
||||
ctx.drawImage(bg, 0, 0, c.width, c.height);
|
||||
|
@ -12451,6 +12453,7 @@ var Human = class {
|
|||
this.analyze("End Segmentation:");
|
||||
}
|
||||
if (!img.tensor) {
|
||||
if (this.config.debug)
|
||||
log("could not convert input to tensor");
|
||||
resolve({ error: "could not convert input to tensor" });
|
||||
return;
|
||||
|
|
|
@ -4497,8 +4497,11 @@ function canvas(width, height) {
|
|||
}
|
||||
function process2(input, config3) {
|
||||
let tensor3;
|
||||
if (!input)
|
||||
throw new Error("input is missing");
|
||||
if (!input) {
|
||||
if (config3.debug)
|
||||
log("input is missing");
|
||||
return { tensor: null, canvas: null };
|
||||
}
|
||||
if (!(input instanceof tf3.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env.Canvas !== "undefined" && input instanceof env.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) {
|
||||
throw new Error("input type is not recognized");
|
||||
}
|
||||
|
@ -4509,12 +4512,14 @@ function process2(input, config3) {
|
|||
throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
|
||||
} else {
|
||||
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
|
||||
if (config3.debug)
|
||||
log("input stream is not ready");
|
||||
return { tensor: null, canvas: inCanvas };
|
||||
}
|
||||
const originalWidth = input["naturalWidth"] || input["videoWidth"] || input["width"] || input["shape"] && input["shape"][1] > 0;
|
||||
const originalHeight = input["naturalHeight"] || input["videoHeight"] || input["height"] || input["shape"] && input["shape"][2] > 0;
|
||||
if (!originalWidth || !originalHeight) {
|
||||
if (config3.debug)
|
||||
log("cannot determine input dimensions");
|
||||
return { tensor: null, canvas: inCanvas };
|
||||
}
|
||||
|
@ -10121,22 +10126,16 @@ async function predict11(input) {
|
|||
tf19.dispose(resizeOutput);
|
||||
return data;
|
||||
}
|
||||
const overlay = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
overlay.width = width;
|
||||
overlay.height = height;
|
||||
const overlay = canvas(width, height);
|
||||
if (tf19.browser)
|
||||
await tf19.browser.toPixels(resizeOutput, overlay);
|
||||
tf19.dispose(resizeOutput);
|
||||
const alphaCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
alphaCanvas.width = width;
|
||||
alphaCanvas.height = height;
|
||||
const alphaCanvas = canvas(width, height);
|
||||
const ctxAlpha = alphaCanvas.getContext("2d");
|
||||
ctxAlpha.filter = "blur(8px";
|
||||
await ctxAlpha.drawImage(overlay, 0, 0);
|
||||
const alpha = ctxAlpha.getImageData(0, 0, width, height).data;
|
||||
const original = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
original.width = width;
|
||||
original.height = height;
|
||||
const original = canvas(width, height);
|
||||
const ctx = original.getContext("2d");
|
||||
if (input.canvas)
|
||||
await ctx.drawImage(input.canvas, 0, 0);
|
||||
|
@ -10156,17 +10155,20 @@ async function process5(input, background, config3) {
|
|||
if (!model9)
|
||||
await load12(config3);
|
||||
const img = process2(input, config3);
|
||||
const tmp = process2(background, config3);
|
||||
if (!img.canvas || !tmp.canvas) {
|
||||
if (config3.debug)
|
||||
log("segmentation cannot process input or background");
|
||||
return null;
|
||||
}
|
||||
const alpha = await predict11(img);
|
||||
tf19.dispose(img.tensor);
|
||||
if (background && alpha) {
|
||||
const tmp = process2(background, config3);
|
||||
const bg = tmp.canvas;
|
||||
tf19.dispose(tmp.tensor);
|
||||
const fg = img.canvas;
|
||||
const fgData = (_a = fg.getContext("2d")) == null ? void 0 : _a.getImageData(0, 0, fg.width, fg.height).data;
|
||||
const c = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(fg.width, fg.height) : document.createElement("canvas");
|
||||
c.width = fg.width;
|
||||
c.height = fg.height;
|
||||
const c = canvas(fg.width, fg.height);
|
||||
const ctx = c.getContext("2d");
|
||||
ctx.globalCompositeOperation = "copy";
|
||||
ctx.drawImage(bg, 0, 0, c.width, c.height);
|
||||
|
@ -12452,6 +12454,7 @@ var Human = class {
|
|||
this.analyze("End Segmentation:");
|
||||
}
|
||||
if (!img.tensor) {
|
||||
if (this.config.debug)
|
||||
log("could not convert input to tensor");
|
||||
resolve({ error: "could not convert input to tensor" });
|
||||
return;
|
||||
|
|
|
@ -4496,8 +4496,11 @@ function canvas(width, height) {
|
|||
}
|
||||
function process2(input, config3) {
|
||||
let tensor3;
|
||||
if (!input)
|
||||
throw new Error("input is missing");
|
||||
if (!input) {
|
||||
if (config3.debug)
|
||||
log("input is missing");
|
||||
return { tensor: null, canvas: null };
|
||||
}
|
||||
if (!(input instanceof tf3.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env.Canvas !== "undefined" && input instanceof env.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) {
|
||||
throw new Error("input type is not recognized");
|
||||
}
|
||||
|
@ -4508,12 +4511,14 @@ function process2(input, config3) {
|
|||
throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
|
||||
} else {
|
||||
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
|
||||
if (config3.debug)
|
||||
log("input stream is not ready");
|
||||
return { tensor: null, canvas: inCanvas };
|
||||
}
|
||||
const originalWidth = input["naturalWidth"] || input["videoWidth"] || input["width"] || input["shape"] && input["shape"][1] > 0;
|
||||
const originalHeight = input["naturalHeight"] || input["videoHeight"] || input["height"] || input["shape"] && input["shape"][2] > 0;
|
||||
if (!originalWidth || !originalHeight) {
|
||||
if (config3.debug)
|
||||
log("cannot determine input dimensions");
|
||||
return { tensor: null, canvas: inCanvas };
|
||||
}
|
||||
|
@ -10120,22 +10125,16 @@ async function predict11(input) {
|
|||
tf19.dispose(resizeOutput);
|
||||
return data;
|
||||
}
|
||||
const overlay = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
overlay.width = width;
|
||||
overlay.height = height;
|
||||
const overlay = canvas(width, height);
|
||||
if (tf19.browser)
|
||||
await tf19.browser.toPixels(resizeOutput, overlay);
|
||||
tf19.dispose(resizeOutput);
|
||||
const alphaCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
alphaCanvas.width = width;
|
||||
alphaCanvas.height = height;
|
||||
const alphaCanvas = canvas(width, height);
|
||||
const ctxAlpha = alphaCanvas.getContext("2d");
|
||||
ctxAlpha.filter = "blur(8px";
|
||||
await ctxAlpha.drawImage(overlay, 0, 0);
|
||||
const alpha = ctxAlpha.getImageData(0, 0, width, height).data;
|
||||
const original = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(width, height) : document.createElement("canvas");
|
||||
original.width = width;
|
||||
original.height = height;
|
||||
const original = canvas(width, height);
|
||||
const ctx = original.getContext("2d");
|
||||
if (input.canvas)
|
||||
await ctx.drawImage(input.canvas, 0, 0);
|
||||
|
@ -10155,17 +10154,20 @@ async function process5(input, background, config3) {
|
|||
if (!model9)
|
||||
await load12(config3);
|
||||
const img = process2(input, config3);
|
||||
const tmp = process2(background, config3);
|
||||
if (!img.canvas || !tmp.canvas) {
|
||||
if (config3.debug)
|
||||
log("segmentation cannot process input or background");
|
||||
return null;
|
||||
}
|
||||
const alpha = await predict11(img);
|
||||
tf19.dispose(img.tensor);
|
||||
if (background && alpha) {
|
||||
const tmp = process2(background, config3);
|
||||
const bg = tmp.canvas;
|
||||
tf19.dispose(tmp.tensor);
|
||||
const fg = img.canvas;
|
||||
const fgData = (_a = fg.getContext("2d")) == null ? void 0 : _a.getImageData(0, 0, fg.width, fg.height).data;
|
||||
const c = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(fg.width, fg.height) : document.createElement("canvas");
|
||||
c.width = fg.width;
|
||||
c.height = fg.height;
|
||||
const c = canvas(fg.width, fg.height);
|
||||
const ctx = c.getContext("2d");
|
||||
ctx.globalCompositeOperation = "copy";
|
||||
ctx.drawImage(bg, 0, 0, c.width, c.height);
|
||||
|
@ -12451,6 +12453,7 @@ var Human = class {
|
|||
this.analyze("End Segmentation:");
|
||||
}
|
||||
if (!img.tensor) {
|
||||
if (this.config.debug)
|
||||
log("could not convert input to tensor");
|
||||
resolve({ error: "could not convert input to tensor" });
|
||||
return;
|
||||
|
|
|
@ -66,7 +66,7 @@
|
|||
"@tensorflow/tfjs-layers": "^3.9.0",
|
||||
"@tensorflow/tfjs-node": "^3.9.0",
|
||||
"@tensorflow/tfjs-node-gpu": "^3.9.0",
|
||||
"@types/node": "^16.9.3",
|
||||
"@types/node": "^16.9.4",
|
||||
"@typescript-eslint/eslint-plugin": "^4.31.1",
|
||||
"@typescript-eslint/parser": "^4.31.1",
|
||||
"@vladmandic/build": "^0.5.2",
|
||||
|
|
|
@ -442,7 +442,7 @@ export class Human {
|
|||
}
|
||||
|
||||
if (!img.tensor) {
|
||||
log('could not convert input to tensor');
|
||||
if (this.config.debug) log('could not convert input to tensor');
|
||||
resolve({ error: 'could not convert input to tensor' });
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -39,9 +39,13 @@ export function canvas(width, height): HTMLCanvasElement | OffscreenCanvas {
|
|||
// process input image and return tensor
|
||||
// input can be tensor, imagedata, htmlimageelement, htmlvideoelement
|
||||
// input is resized and run through imagefx filter
|
||||
export function process(input: Input, config: Config): { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement } {
|
||||
export function process(input: Input, config: Config): { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement | null } {
|
||||
let tensor;
|
||||
if (!input) throw new Error('input is missing');
|
||||
if (!input) {
|
||||
// throw new Error('input is missing');
|
||||
if (config.debug) log('input is missing');
|
||||
return { tensor: null, canvas: null }; // video may become temporarily unavailable due to onresize
|
||||
}
|
||||
// sanity checks since different browsers do not implement all dom elements
|
||||
if (
|
||||
!(input instanceof tf.Tensor)
|
||||
|
@ -64,13 +68,13 @@ export function process(input: Input, config: Config): { tensor: Tensor | null,
|
|||
} else {
|
||||
// check if resizing will be needed
|
||||
if (typeof input['readyState'] !== 'undefined' && input['readyState'] <= 2) {
|
||||
log('input stream is not ready');
|
||||
if (config.debug) log('input stream is not ready');
|
||||
return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
|
||||
}
|
||||
const originalWidth = input['naturalWidth'] || input['videoWidth'] || input['width'] || (input['shape'] && (input['shape'][1] > 0));
|
||||
const originalHeight = input['naturalHeight'] || input['videoHeight'] || input['height'] || (input['shape'] && (input['shape'][2] > 0));
|
||||
if (!originalWidth || !originalHeight) {
|
||||
log('cannot determine input dimensions');
|
||||
if (config.debug) log('cannot determine input dimensions');
|
||||
return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
|
||||
}
|
||||
let targetWidth = originalWidth;
|
||||
|
|
|
@ -67,25 +67,19 @@ export async function predict(input: { tensor: Tensor | null, canvas: OffscreenC
|
|||
return data; // we're running in nodejs so return alpha array as-is
|
||||
}
|
||||
|
||||
const overlay = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(width, height) : document.createElement('canvas');
|
||||
overlay.width = width;
|
||||
overlay.height = height;
|
||||
const overlay = image.canvas(width, height);
|
||||
if (tf.browser) await tf.browser.toPixels(resizeOutput, overlay);
|
||||
tf.dispose(resizeOutput);
|
||||
|
||||
// get alpha channel data
|
||||
const alphaCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(width, height) : document.createElement('canvas'); // need one more copy since input may already have gl context so 2d context fails
|
||||
alphaCanvas.width = width;
|
||||
alphaCanvas.height = height;
|
||||
const alphaCanvas = image.canvas(width, height);
|
||||
const ctxAlpha = alphaCanvas.getContext('2d') as CanvasRenderingContext2D;
|
||||
ctxAlpha.filter = 'blur(8px';
|
||||
await ctxAlpha.drawImage(overlay, 0, 0);
|
||||
const alpha = ctxAlpha.getImageData(0, 0, width, height).data;
|
||||
|
||||
// get original canvas merged with overlay
|
||||
const original = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(width, height) : document.createElement('canvas'); // need one more copy since input may already have gl context so 2d context fails
|
||||
original.width = width;
|
||||
original.height = height;
|
||||
const original = image.canvas(width, height);
|
||||
const ctx = original.getContext('2d') as CanvasRenderingContext2D;
|
||||
if (input.canvas) await ctx.drawImage(input.canvas, 0, 0);
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/globalCompositeOperation // best options are: darken, color-burn, multiply
|
||||
|
@ -105,19 +99,21 @@ export async function process(input: Input, background: Input | undefined, confi
|
|||
busy = true;
|
||||
if (!model) await load(config);
|
||||
const img = image.process(input, config);
|
||||
const tmp = image.process(background, config);
|
||||
if (!img.canvas || !tmp.canvas) {
|
||||
if (config.debug) log('segmentation cannot process input or background');
|
||||
return null;
|
||||
}
|
||||
const alpha = await predict(img);
|
||||
tf.dispose(img.tensor);
|
||||
|
||||
if (background && alpha) {
|
||||
const tmp = image.process(background, config);
|
||||
const bg = tmp.canvas;
|
||||
const bg = tmp.canvas as HTMLCanvasElement;
|
||||
tf.dispose(tmp.tensor);
|
||||
const fg = img.canvas;
|
||||
const fg = img.canvas as HTMLCanvasElement;
|
||||
const fgData = fg.getContext('2d')?.getImageData(0, 0, fg.width, fg.height).data as Uint8ClampedArray;
|
||||
|
||||
const c = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(fg.width, fg.height) : document.createElement('canvas');
|
||||
c.width = fg.width;
|
||||
c.height = fg.height;
|
||||
const c = image.canvas(fg.width, fg.height);
|
||||
const ctx = c.getContext('2d') as CanvasRenderingContext2D;
|
||||
|
||||
ctx.globalCompositeOperation = 'copy'; // reset
|
||||
|
|
|
@ -1,68 +1,24 @@
|
|||
2021-09-19 13:55:00 [36mINFO: [39m @vladmandic/human version 2.2.2
|
||||
2021-09-19 13:55:00 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v16.5.0
|
||||
2021-09-19 13:55:00 [36mINFO: [39m Application: {"name":"@vladmandic/human","version":"2.2.2"}
|
||||
2021-09-19 13:55:00 [36mINFO: [39m Environment: {"profile":"production","config":"build.json","tsconfig":true,"eslintrc":true,"git":true}
|
||||
2021-09-19 13:55:00 [36mINFO: [39m Toolchain: {"build":"0.5.2","esbuild":"0.12.28","typescript":"4.4.3","typedoc":"0.22.4","eslint":"7.32.0"}
|
||||
2021-09-19 13:55:00 [36mINFO: [39m Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
|
||||
2021-09-19 13:55:00 [35mSTATE:[39m Clean: {"locations":["dist/*","types/*","typedoc/*"]}
|
||||
2021-09-19 13:55:00 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1416}
|
||||
2021-09-19 13:55:01 [35mSTATE:[39m Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":50,"inputBytes":481770,"outputBytes":408805}
|
||||
2021-09-19 13:55:01 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1424}
|
||||
2021-09-19 13:55:01 [35mSTATE:[39m Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":50,"inputBytes":481778,"outputBytes":408809}
|
||||
2021-09-19 13:55:01 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1491}
|
||||
2021-09-19 13:55:01 [35mSTATE:[39m Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":50,"inputBytes":481845,"outputBytes":408881}
|
||||
2021-09-19 13:55:01 [35mSTATE:[39m Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1631}
|
||||
2021-09-19 13:55:01 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":3088,"outputBytes":793}
|
||||
2021-09-19 13:55:01 [35mSTATE:[39m Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":50,"inputBytes":481147,"outputBytes":410098}
|
||||
2021-09-19 13:55:01 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":8,"inputBytes":3088,"outputBytes":2376809}
|
||||
2021-09-19 13:55:01 [35mSTATE:[39m Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":50,"inputBytes":2857163,"outputBytes":1409412}
|
||||
2021-09-19 13:55:02 [35mSTATE:[39m Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":50,"inputBytes":2857163,"outputBytes":2614017}
|
||||
2021-09-19 13:55:18 [35mSTATE:[39m Typings: {"input":"src/human.ts","output":"types","files":94}
|
||||
2021-09-19 13:55:23 [35mSTATE:[39m TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":29,"generated":true}
|
||||
2021-09-19 13:55:51 [35mSTATE:[39m Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":83,"errors":0,"warnings":0}
|
||||
2021-09-19 13:55:52 [35mSTATE:[39m ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
|
||||
2021-09-19 13:55:52 [36mINFO: [39m Done...
|
||||
2021-09-19 14:14:35 [36mINFO: [39m @vladmandic/human version 2.2.2
|
||||
2021-09-19 14:14:35 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v16.5.0
|
||||
2021-09-19 14:14:35 [36mINFO: [39m Application: {"name":"@vladmandic/human","version":"2.2.2"}
|
||||
2021-09-19 14:14:35 [36mINFO: [39m Environment: {"profile":"development","config":"build.json","tsconfig":true,"eslintrc":true,"git":true}
|
||||
2021-09-19 14:14:35 [36mINFO: [39m Toolchain: {"build":"0.5.2","esbuild":"0.12.28","typescript":"4.4.3","typedoc":"0.22.4","eslint":"7.32.0"}
|
||||
2021-09-19 14:14:35 [36mINFO: [39m Build: {"profile":"development","steps":["serve","watch","compile"]}
|
||||
2021-09-19 14:14:35 [35mSTATE:[39m WebServer: {"ssl":false,"port":10030,"root":"."}
|
||||
2021-09-19 14:14:35 [35mSTATE:[39m WebServer: {"ssl":true,"port":10031,"root":".","sslKey":"node_modules/@vladmandic/build/cert/https.key","sslCrt":"node_modules/@vladmandic/build/cert/https.crt"}
|
||||
2021-09-19 14:14:35 [35mSTATE:[39m Watch: {"locations":["src/**","README.md","src/**/*","tfjs/**/*"]}
|
||||
2021-09-19 14:14:35 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1416}
|
||||
2021-09-19 14:14:35 [35mSTATE:[39m Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":50,"inputBytes":481782,"outputBytes":408817}
|
||||
2021-09-19 14:14:35 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1424}
|
||||
2021-09-19 14:14:35 [35mSTATE:[39m Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":50,"inputBytes":481790,"outputBytes":408821}
|
||||
2021-09-19 14:14:35 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1491}
|
||||
2021-09-19 14:14:35 [35mSTATE:[39m Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":50,"inputBytes":481857,"outputBytes":408893}
|
||||
2021-09-19 14:14:35 [35mSTATE:[39m Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1631}
|
||||
2021-09-19 14:14:35 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":3088,"outputBytes":793}
|
||||
2021-09-19 14:14:35 [35mSTATE:[39m Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":50,"inputBytes":481159,"outputBytes":410110}
|
||||
2021-09-19 14:14:36 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":8,"inputBytes":3088,"outputBytes":2376809}
|
||||
2021-09-19 14:14:36 [35mSTATE:[39m Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":50,"inputBytes":2857175,"outputBytes":1409415}
|
||||
2021-09-19 14:14:37 [35mSTATE:[39m Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":50,"inputBytes":2857175,"outputBytes":2614029}
|
||||
2021-09-19 14:14:37 [36mINFO: [39m Listening...
|
||||
2021-09-19 14:15:02 [36mINFO: [39m @vladmandic/human version 2.2.2
|
||||
2021-09-19 14:15:02 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v16.5.0
|
||||
2021-09-19 14:15:02 [36mINFO: [39m Application: {"name":"@vladmandic/human","version":"2.2.2"}
|
||||
2021-09-19 14:15:02 [36mINFO: [39m Environment: {"profile":"development","config":"build.json","tsconfig":true,"eslintrc":true,"git":true}
|
||||
2021-09-19 14:15:02 [36mINFO: [39m Toolchain: {"build":"0.5.2","esbuild":"0.12.28","typescript":"4.4.3","typedoc":"0.22.4","eslint":"7.32.0"}
|
||||
2021-09-19 14:15:02 [36mINFO: [39m Build: {"profile":"development","steps":["serve","watch","compile"]}
|
||||
2021-09-19 14:15:02 [35mSTATE:[39m WebServer: {"ssl":false,"port":10030,"root":"."}
|
||||
2021-09-19 14:15:02 [35mSTATE:[39m WebServer: {"ssl":true,"port":10031,"root":".","sslKey":"node_modules/@vladmandic/build/cert/https.key","sslCrt":"node_modules/@vladmandic/build/cert/https.crt"}
|
||||
2021-09-19 14:15:02 [35mSTATE:[39m Watch: {"locations":["src/**","README.md","src/**/*","tfjs/**/*"]}
|
||||
2021-09-19 14:15:02 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1416}
|
||||
2021-09-19 14:15:02 [35mSTATE:[39m Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":50,"inputBytes":481794,"outputBytes":408829}
|
||||
2021-09-19 14:15:02 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1424}
|
||||
2021-09-19 14:15:02 [35mSTATE:[39m Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":50,"inputBytes":481802,"outputBytes":408833}
|
||||
2021-09-19 14:15:02 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1491}
|
||||
2021-09-19 14:15:02 [35mSTATE:[39m Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":50,"inputBytes":481869,"outputBytes":408905}
|
||||
2021-09-19 14:15:02 [35mSTATE:[39m Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1631}
|
||||
2021-09-19 14:15:02 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":3088,"outputBytes":793}
|
||||
2021-09-19 14:15:02 [35mSTATE:[39m Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":50,"inputBytes":481171,"outputBytes":410122}
|
||||
2021-09-19 14:15:02 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":8,"inputBytes":3088,"outputBytes":2376809}
|
||||
2021-09-19 14:15:03 [35mSTATE:[39m Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":50,"inputBytes":2857187,"outputBytes":1409418}
|
||||
2021-09-19 14:15:03 [35mSTATE:[39m Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":50,"inputBytes":2857187,"outputBytes":2614041}
|
||||
2021-09-19 14:15:03 [36mINFO: [39m Listening...
|
||||
2021-09-20 09:41:09 [36mINFO: [39m @vladmandic/human version 2.2.2
|
||||
2021-09-20 09:41:09 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v16.5.0
|
||||
2021-09-20 09:41:09 [36mINFO: [39m Application: {"name":"@vladmandic/human","version":"2.2.2"}
|
||||
2021-09-20 09:41:09 [36mINFO: [39m Environment: {"profile":"production","config":"build.json","tsconfig":true,"eslintrc":true,"git":true}
|
||||
2021-09-20 09:41:09 [36mINFO: [39m Toolchain: {"build":"0.5.2","esbuild":"0.12.28","typescript":"4.4.3","typedoc":"0.22.4","eslint":"7.32.0"}
|
||||
2021-09-20 09:41:09 [36mINFO: [39m Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
|
||||
2021-09-20 09:41:09 [35mSTATE:[39m Clean: {"locations":["dist/*","types/*","typedoc/*"]}
|
||||
2021-09-20 09:41:09 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1416}
|
||||
2021-09-20 09:41:09 [35mSTATE:[39m Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":50,"inputBytes":481478,"outputBytes":408546}
|
||||
2021-09-20 09:41:09 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1424}
|
||||
2021-09-20 09:41:09 [35mSTATE:[39m Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":50,"inputBytes":481486,"outputBytes":408550}
|
||||
2021-09-20 09:41:09 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1491}
|
||||
2021-09-20 09:41:09 [35mSTATE:[39m Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":50,"inputBytes":481553,"outputBytes":408622}
|
||||
2021-09-20 09:41:09 [35mSTATE:[39m Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1631}
|
||||
2021-09-20 09:41:09 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":3088,"outputBytes":793}
|
||||
2021-09-20 09:41:09 [35mSTATE:[39m Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":50,"inputBytes":480855,"outputBytes":409839}
|
||||
2021-09-20 09:41:10 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":8,"inputBytes":3088,"outputBytes":2376809}
|
||||
2021-09-20 09:41:10 [35mSTATE:[39m Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":50,"inputBytes":2856871,"outputBytes":1409130}
|
||||
2021-09-20 09:41:11 [35mSTATE:[39m Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":50,"inputBytes":2856871,"outputBytes":2613758}
|
||||
2021-09-20 09:41:27 [35mSTATE:[39m Typings: {"input":"src/human.ts","output":"types","files":94}
|
||||
2021-09-20 09:41:32 [35mSTATE:[39m TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":29,"generated":true}
|
||||
2021-09-20 09:41:58 [35mSTATE:[39m Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":83,"errors":0,"warnings":0}
|
||||
2021-09-20 09:41:58 [35mSTATE:[39m ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
|
||||
2021-09-20 09:41:58 [36mINFO: [39m Done...
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
const fs = require('fs');
|
||||
const process = require('process');
|
||||
const canvasJS = require('canvas');
|
||||
|
||||
|
@ -165,6 +166,13 @@ async function test(Human, inputConfig) {
|
|||
else log('error', 'failed: configuration default validation', invalid);
|
||||
delete config.invalid;
|
||||
|
||||
// test model loading
|
||||
await human.load();
|
||||
const models = Object.keys(human.models).map((model) => ({ name: model, loaded: (human.models[model] !== null) }));
|
||||
const loaded = models.filter((model) => model.loaded);
|
||||
if (models.length === 14 && loaded.length === 7) log('state', 'passed: models loaded', models.length, loaded.length);
|
||||
else log('error', 'failed: models loaded', models.length, loaded.length);
|
||||
|
||||
// test warmup sequences
|
||||
await testInstance(human);
|
||||
config.warmup = 'none';
|
||||
|
@ -175,28 +183,73 @@ async function test(Human, inputConfig) {
|
|||
res = await testWarmup(human, 'default');
|
||||
if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 0 || res?.gesture?.length !== 3) log('error', 'failed: warmup face result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
|
||||
else log('state', 'passed: warmup face result match');
|
||||
|
||||
config.warmup = 'body';
|
||||
res = await testWarmup(human, 'default');
|
||||
if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 0 || res?.gesture?.length !== 3) log('error', 'failed: warmup body result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
|
||||
else log('state', 'passed: warmup body result match');
|
||||
|
||||
// test default config
|
||||
// test default config async
|
||||
log('info', 'test default');
|
||||
human.reset();
|
||||
config.async = true;
|
||||
config.cacheSensitivity = 0;
|
||||
res = await testDetect(human, 'samples/ai-body.jpg', 'default');
|
||||
if (!res || res?.face?.length !== 1 || res?.face[0].gender !== 'female') log('error', 'failed: default result face mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
|
||||
else log('state', 'passed: default result face match');
|
||||
|
||||
// test default config
|
||||
// test default config sync
|
||||
log('info', 'test sync');
|
||||
human.reset();
|
||||
config.async = false;
|
||||
config.cacheSensitivity = 0;
|
||||
res = await testDetect(human, 'samples/ai-body.jpg', 'default');
|
||||
if (!res || res?.face?.length !== 1 || res?.face[0].gender !== 'female') log('error', 'failed: default sync', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
|
||||
else log('state', 'passed: default sync');
|
||||
|
||||
// test image processing
|
||||
const img1 = await human.image(null);
|
||||
const img2 = await human.image(await getImage(human, 'samples/ai-face.jpg'));
|
||||
if (!img1 || !img2 || img1.tensor !== null || img2.tensor?.shape?.length !== 4) log('error', 'failed: image input', img1?.tensor?.shape, img2?.tensor?.shape);
|
||||
else log('state', 'passed: image input', img1?.tensor?.shape, img2?.tensor?.shape);
|
||||
|
||||
// test null input
|
||||
res = await human.detect(null);
|
||||
if (!res || !res.error) log('error', 'failed: invalid input', res);
|
||||
else log('state', 'passed: invalid input', res);
|
||||
|
||||
// test face similarity
|
||||
log('info', 'test face similarity');
|
||||
human.reset();
|
||||
config.async = false;
|
||||
config.cacheSensitivity = 0;
|
||||
let res1 = await testDetect(human, 'samples/ai-face.jpg', 'default');
|
||||
let res2 = await testDetect(human, 'samples/ai-body.jpg', 'default');
|
||||
let res3 = await testDetect(human, 'samples/ai-upper.jpg', 'default');
|
||||
const desc1 = res1 && res1.face && res1.face[0] && res1.face[0].embedding ? [...res1.face[0].embedding] : null;
|
||||
const desc2 = res2 && res2.face && res2.face[0] && res2.face[0].embedding ? [...res2.face[0].embedding] : null;
|
||||
const desc3 = res3 && res3.face && res3.face[0] && res3.face[0].embedding ? [...res3.face[0].embedding] : null;
|
||||
if (!desc1 || !desc2 || !desc3 || desc1.length !== 1024 || desc2.length !== 1024 || desc3.length !== 1024) log('error', 'failed: face descriptor', desc1?.length, desc2?.length, desc3?.length);
|
||||
else log('state', 'passed: face descriptor');
|
||||
res1 = Math.round(100 * human.similarity(desc1, desc2));
|
||||
res2 = Math.round(100 * human.similarity(desc1, desc3));
|
||||
res3 = Math.round(100 * human.similarity(desc2, desc3));
|
||||
if (res1 !== 51 || res2 !== 49 || res3 !== 53) log('error', 'failed: face match ', res1, res2, res3);
|
||||
else log('state', 'passed: face match');
|
||||
|
||||
// test face matching
|
||||
log('info', 'test face matching');
|
||||
let db = [];
|
||||
try {
|
||||
db = JSON.parse(fs.readFileSync('demo/facematch/faces.json').toString());
|
||||
} catch { /***/ }
|
||||
if (db.length < 100) log('error', 'failed: face database ', db.length);
|
||||
else log('state', 'passed: face database', db.length);
|
||||
res1 = human.match(desc1, db);
|
||||
res2 = human.match(desc2, db);
|
||||
res3 = human.match(desc3, db);
|
||||
if (!res1 || !res1['name'] || !res2 || !res2['name'] || !res3 || !res3['name']) log('error', 'failed: face match ', res1);
|
||||
else log('state', 'passed: face match', { first: { name: res1.name, similarity: res1.similarity } }, { second: { name: res2.name, similarity: res2.similarity } }, { third: { name: res3.name, similarity: res3.similarity } });
|
||||
|
||||
// test object detection
|
||||
log('info', 'test object');
|
||||
human.reset();
|
||||
|
@ -273,6 +326,22 @@ async function test(Human, inputConfig) {
|
|||
testDetect(second, 'samples/ai-upper.jpg', 'default'),
|
||||
]);
|
||||
|
||||
// test monkey-patch
|
||||
human.env.Canvas = canvasJS.Canvas; // monkey-patch human to use external canvas library
|
||||
const inputImage = await canvasJS.loadImage('samples/ai-face.jpg'); // load image using canvas library
|
||||
const inputCanvas = new canvasJS.Canvas(inputImage.width, inputImage.height); // create canvas
|
||||
const ctx = inputCanvas.getContext('2d');
|
||||
ctx.drawImage(inputImage, 0, 0); // draw input image onto canvas
|
||||
res = await human.detect(inputCanvas);
|
||||
if (!res || res?.face?.length !== 1) log('error', 'failed: monkey patch');
|
||||
else log('state', 'passed: monkey patch');
|
||||
|
||||
// test segmentation
|
||||
res = await human.segmentation(inputCanvas, inputCanvas);
|
||||
if (!res || !res.width || !res.height) log('error', 'failed: segmentation', res);
|
||||
else log('state', 'passed: segmentation', [res.width, res.height]);
|
||||
human.env.Canvas = undefined;
|
||||
|
||||
// tests end
|
||||
const t1 = process.hrtime.bigint();
|
||||
|
||||
|
|
|
@ -27,7 +27,7 @@ const config = {
|
|||
},
|
||||
hand: { enabled: true, rotation: false },
|
||||
body: { enabled: true },
|
||||
object: { enabled: false },
|
||||
object: { enabled: true },
|
||||
segmentation: { enabled: true },
|
||||
filter: { enabled: false },
|
||||
};
|
||||
|
|
|
@ -60,6 +60,7 @@ function logStdIO(ok, test, buffer) {
|
|||
}
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
|
||||
async function runTest(test) {
|
||||
log.info();
|
||||
log.info(test, 'start');
|
||||
|
@ -73,6 +74,7 @@ async function runTest(test) {
|
|||
});
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
|
||||
async function runDemo(demo) {
|
||||
log.info();
|
||||
log.info(demo, 'start');
|
||||
|
@ -94,7 +96,8 @@ async function testAll() {
|
|||
process.on('unhandledRejection', (data) => log.error('nodejs unhandled rejection', data));
|
||||
process.on('uncaughtException', (data) => log.error('nodejs unhandled exception', data));
|
||||
log.info('tests:', tests);
|
||||
for (const demo of demos) await runDemo(demo);
|
||||
log.info('demos:', demos);
|
||||
// for (const demo of demos) await runDemo(demo);
|
||||
for (const test of tests) await runTest(test);
|
||||
log.info();
|
||||
log.info('status:', status);
|
||||
|
|
1173
test/test.log
1173
test/test.log
File diff suppressed because it is too large
Load Diff
|
@ -74,9 +74,9 @@
|
|||
</div></li></ul></section><section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class"><a name="enhance" class="tsd-anchor"></a><h3>enhance</h3><ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class"><li class="tsd-signature tsd-kind-icon">enhance<span class="tsd-signature-symbol">(</span>input<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span></li></ul><ul class="tsd-descriptions"><li class="tsd-description"><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/human.ts#L298">human.ts:298</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Enhance method performs additional enhancements to the face image previously detected, for further processing</p>
|
||||
</div></div><h4 class="tsd-parameters-title">Parameters</h4><ul class="tsd-parameters"><li><h5>input: <span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span></h5></li></ul><h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span></h4><div><p>Tensor</p>
|
||||
</div></li></ul></section><section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class"><a name="image" class="tsd-anchor"></a><h3>image</h3><ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class"><li class="tsd-signature tsd-kind-icon">image<span class="tsd-signature-symbol">(</span>input<span class="tsd-signature-symbol">: </span><a href="../index.html#Input" class="tsd-signature-type" data-tsd-kind="Type alias">Input</a><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>canvas<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span><span class="tsd-signature-symbol">; </span>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span><span class="tsd-signature-symbol"> }</span></li></ul><ul class="tsd-descriptions"><li class="tsd-description"><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/human.ts#L265">human.ts:265</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</div></li></ul></section><section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class"><a name="image" class="tsd-anchor"></a><h3>image</h3><ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class"><li class="tsd-signature tsd-kind-icon">image<span class="tsd-signature-symbol">(</span>input<span class="tsd-signature-symbol">: </span><a href="../index.html#Input" class="tsd-signature-type" data-tsd-kind="Type alias">Input</a><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>canvas<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span><span class="tsd-signature-symbol">; </span>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span><span class="tsd-signature-symbol"> }</span></li></ul><ul class="tsd-descriptions"><li class="tsd-description"><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/human.ts#L265">human.ts:265</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Process input and return canvas and tensor</p>
|
||||
</div></div><h4 class="tsd-parameters-title">Parameters</h4><ul class="tsd-parameters"><li><h5>input: <a href="../index.html#Input" class="tsd-signature-type" data-tsd-kind="Type alias">Input</a></h5></li></ul><h4 class="tsd-returns-title">Returns <span class="tsd-signature-symbol">{ </span>canvas<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span><span class="tsd-signature-symbol">; </span>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span><span class="tsd-signature-symbol"> }</span></h4><div></div><ul class="tsd-parameters"><li class="tsd-parameter"><h5>canvas<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span></h5></li><li class="tsd-parameter"><h5>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span></h5></li></ul></li></ul></section><section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class"><a name="init" class="tsd-anchor"></a><h3>init</h3><ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class"><li class="tsd-signature tsd-kind-icon">init<span class="tsd-signature-symbol">(</span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">void</span></li></ul><ul 
class="tsd-descriptions"><li class="tsd-description"><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/human.ts#L321">human.ts:321</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
</div></div><h4 class="tsd-parameters-title">Parameters</h4><ul class="tsd-parameters"><li><h5>input: <a href="../index.html#Input" class="tsd-signature-type" data-tsd-kind="Type alias">Input</a></h5></li></ul><h4 class="tsd-returns-title">Returns <span class="tsd-signature-symbol">{ </span>canvas<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span><span class="tsd-signature-symbol">; </span>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span><span class="tsd-signature-symbol"> }</span></h4><div></div><ul class="tsd-parameters"><li class="tsd-parameter"><h5>canvas<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span></h5></li><li class="tsd-parameter"><h5>tensor<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Tensor</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">Rank</span><span class="tsd-signature-symbol">></span></h5></li></ul></li></ul></section><section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class"><a name="init" class="tsd-anchor"></a><h3>init</h3><ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class"><li class="tsd-signature tsd-kind-icon">init<span 
class="tsd-signature-symbol">(</span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">void</span></li></ul><ul class="tsd-descriptions"><li class="tsd-description"><aside class="tsd-sources"><ul><li>Defined in <a href="https://github.com/vladmandic/human/blob/main/src/human.ts#L321">human.ts:321</a></li></ul></aside><div class="tsd-comment tsd-typography"><div class="lead">
|
||||
<p>Explicit backend initialization</p>
|
||||
<ul>
|
||||
<li>Normally done implicitly during initial load phase</li>
|
||||
|
|
|
@ -156,7 +156,7 @@ export declare class Human {
|
|||
*/
|
||||
image: (input: Input) => {
|
||||
tensor: Tensor<import("@tensorflow/tfjs-core").Rank> | null;
|
||||
canvas: OffscreenCanvas | HTMLCanvasElement;
|
||||
canvas: OffscreenCanvas | HTMLCanvasElement | null;
|
||||
};
|
||||
/** Simmilarity method calculates simmilarity between two provided face descriptors (face embeddings)
|
||||
* - Calculation is based on normalized Minkowski distance between two descriptors
|
||||
|
|
|
@ -8,7 +8,7 @@ declare type Input = Tensor | ImageData | ImageBitmap | HTMLImageElement | HTMLM
|
|||
export declare function canvas(width: any, height: any): HTMLCanvasElement | OffscreenCanvas;
|
||||
export declare function process(input: Input, config: Config): {
|
||||
tensor: Tensor | null;
|
||||
canvas: OffscreenCanvas | HTMLCanvasElement;
|
||||
canvas: OffscreenCanvas | HTMLCanvasElement | null;
|
||||
};
|
||||
export declare function skip(config: any, input: Tensor): Promise<boolean>;
|
||||
export {};
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"image.d.ts","sourceRoot":"","sources":["../../../src/image/image.ts"],"names":[],"mappings":"AAAA;;GAEG;AAIH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AACxC,OAAO,EAAE,GAAG,EAAE,MAAM,QAAQ,CAAC;AAG7B,aAAK,KAAK,GAAG,MAAM,GAAG,SAAS,GAAG,WAAW,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,iBAAiB,GAAG,eAAe,GAAG,OAAO,KAAK,GAAG,OAAO,GAAG,CAAC,MAAM,CAAC;AAShL,wBAAgB,MAAM,CAAC,KAAK,KAAA,EAAE,MAAM,KAAA,GAAG,iBAAiB,GAAG,eAAe,CAgBzE;AAKD,wBAAgB,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,GAAG;IAAE,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IAAC,MAAM,EAAE,eAAe,GAAG,iBAAiB,CAAA;CAAE,CAgL5H;AAID,wBAAsB,IAAI,CAAC,MAAM,KAAA,EAAE,KAAK,EAAE,MAAM,oBA2B/C"}
|
||||
{"version":3,"file":"image.d.ts","sourceRoot":"","sources":["../../../src/image/image.ts"],"names":[],"mappings":"AAAA;;GAEG;AAIH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AACxC,OAAO,EAAE,GAAG,EAAE,MAAM,QAAQ,CAAC;AAG7B,aAAK,KAAK,GAAG,MAAM,GAAG,SAAS,GAAG,WAAW,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,iBAAiB,GAAG,eAAe,GAAG,OAAO,KAAK,GAAG,OAAO,GAAG,CAAC,MAAM,CAAC;AAShL,wBAAgB,MAAM,CAAC,KAAK,KAAA,EAAE,MAAM,KAAA,GAAG,iBAAiB,GAAG,eAAe,CAgBzE;AAKD,wBAAgB,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,GAAG;IAAE,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IAAC,MAAM,EAAE,eAAe,GAAG,iBAAiB,GAAG,IAAI,CAAA;CAAE,CAoLnI;AAID,wBAAsB,IAAI,CAAC,MAAM,KAAA,EAAE,KAAK,EAAE,MAAM,oBA2B/C"}
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"segmentation.d.ts","sourceRoot":"","sources":["../../../src/segmentation/segmentation.ts"],"names":[],"mappings":"AAAA;;GAEG;AAKH,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAGxC,aAAK,KAAK,GAAG,MAAM,GAAG,OAAO,KAAK,GAAG,SAAS,GAAG,WAAW,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,iBAAiB,GAAG,eAAe,CAAC;AAK5J,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAO9D;AAED,wBAAsB,OAAO,CAAC,KAAK,EAAE;IAAE,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IAAC,MAAM,EAAE,eAAe,GAAG,iBAAiB,GAAG,IAAI,CAAA;CAAE,GAAG,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC,CA2ErJ;AAED,wBAAsB,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,KAAK,GAAG,SAAS,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,iBAAiB,GAAG,eAAe,GAAG,IAAI,CAAC,CAkC9I"}
|
||||
{"version":3,"file":"segmentation.d.ts","sourceRoot":"","sources":["../../../src/segmentation/segmentation.ts"],"names":[],"mappings":"AAAA;;GAEG;AAKH,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAGxC,aAAK,KAAK,GAAG,MAAM,GAAG,OAAO,KAAK,GAAG,SAAS,GAAG,WAAW,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,gBAAgB,GAAG,iBAAiB,GAAG,eAAe,CAAC;AAK5J,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAO9D;AAED,wBAAsB,OAAO,CAAC,KAAK,EAAE;IAAE,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IAAC,MAAM,EAAE,eAAe,GAAG,iBAAiB,GAAG,IAAI,CAAA;CAAE,GAAG,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC,CAqErJ;AAED,wBAAsB,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,KAAK,GAAG,SAAS,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,iBAAiB,GAAG,eAAe,GAAG,IAAI,CAAC,CAoC9I"}
|
Loading…
Reference in New Issue