webgl exception handling

pull/193/head
Vladimir Mandic 2021-09-17 14:07:44 -04:00
parent 4ebec10dd3
commit 1c7d50e834
20 changed files with 3062 additions and 691 deletions

View File

@ -34,6 +34,7 @@ let userConfig = {
warmup: 'none',
backend: 'humangl',
debug: true,
filter: { enabled: false },
/*
wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.9.0/dist/',
async: false,
@ -80,7 +81,7 @@ const ui = {
useWorker: true, // use web workers for processing
worker: 'index-worker.js',
maxFPSframes: 10, // keep fps history for how many frames
modelsPreload: true, // preload human models on startup
modelsPreload: false, // preload human models on startup
modelsWarmup: false, // warmup human models on startup
buffered: true, // should output be buffered between frames
interpolated: true, // should output be interpolated for smoothness between frames
@ -180,7 +181,7 @@ function status(msg) {
async function videoPlay() {
document.getElementById('btnStartText').innerHTML = 'pause video';
await document.getElementById('video').play();
status();
// status();
}
async function videoPause() {
@ -337,7 +338,7 @@ async function setupCamera() {
} catch (err) {
log(err);
} finally {
status();
// status();
}
return '';
}
@ -394,28 +395,22 @@ async function setupCamera() {
if (initialCameraAccess) log('selected video source:', track, settings); // log('selected camera:', track.label, 'id:', settings.deviceId);
ui.camera = { name: track.label.toLowerCase(), width: video.videoWidth, height: video.videoHeight, facing: settings.facingMode === 'user' ? 'front' : 'back' };
initialCameraAccess = false;
const promise = !stream || new Promise((resolve) => {
video.onloadeddata = () => {
if (!stream) return 'camera stream empty';
const ready = new Promise((resolve) => (video.onloadeddata = () => resolve(true)));
video.srcObject = stream;
await ready;
if (settings.width > settings.height) canvas.style.width = '100vw';
else canvas.style.height = '100vh';
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
ui.menuWidth.input.setAttribute('value', video.videoWidth);
ui.menuHeight.input.setAttribute('value', video.videoHeight);
if (live || ui.autoPlay) videoPlay();
if (live || ui.autoPlay) await videoPlay();
// eslint-disable-next-line no-use-before-define
if ((live || ui.autoPlay) && !ui.detectThread) runHumanDetect(video, canvas);
ui.busy = false;
resolve();
};
});
// attach input to video element
if (stream) {
video.srcObject = stream;
return promise;
}
ui.busy = false;
return 'camera stream empty';
return 'camera stream ready';
}
function initPerfMonitor() {
@ -500,9 +495,8 @@ function runHumanDetect(input, canvas, timestamp) {
// perform detection in worker
webWorker(input, data, canvas, timestamp);
} else {
if (human.env.initial) status('starting detection');
else status();
human.detect(input, userConfig).then((result) => {
status();
/*
setTimeout(async () => { // simulate gl context lost 2sec after initial detection
const ext = human.gl && human.gl.gl ? human.gl.gl.getExtension('WEBGL_lose_context') : {};
@ -926,15 +920,16 @@ async function pwaRegister() {
}
async function main() {
/*
window.addEventListener('unhandledrejection', (evt) => {
if (ui.detectThread) cancelAnimationFrame(ui.detectThread);
if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
const msg = evt.reason.message || evt.reason || evt;
// eslint-disable-next-line no-console
console.error(evt.reason || evt);
document.getElementById('log').innerHTML = evt.reason.message || evt.reason || evt;
status('exception error');
console.error(msg);
document.getElementById('log').innerHTML = msg;
status(`exception: ${msg}`);
evt.preventDefault();
});
*/
log('demo starting ...');
@ -945,7 +940,7 @@ async function main() {
// sanity check for webworker compatibility
if (typeof Worker === 'undefined' || typeof OffscreenCanvas === 'undefined') {
ui.useWorker = false;
log('workers are disabled due to missing browser functionality');
log('webworker functionality is disabled due to missing browser functionality');
}
// register PWA ServiceWorker
@ -1010,6 +1005,8 @@ async function main() {
await human.load(userConfig); // this is not required, just pre-loads all models
const loaded = Object.keys(human.models).filter((a) => human.models[a]);
log('demo loaded models:', loaded);
} else {
await human.init();
}
// warmup models

View File

@ -55,7 +55,7 @@ function join(folder, file) {
const skipJoin = file.startsWith(".") || file.startsWith("/") || file.startsWith("http:") || file.startsWith("https:") || file.startsWith("file:");
const path = skipJoin ? `${file}` : `${folder}${separator}${file}`;
if (!path.toLocaleLowerCase().includes(".json"))
throw new Error(`Human: ModelPath Error: ${path} Expecting JSON file`);
throw new Error(`modelpath error: ${path} expecting json file`);
return path;
}
function log(...msg) {
@ -3731,7 +3731,7 @@ function GLProgram(gl, vertexSource, fragmentSource) {
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS))
throw new Error("Filter: GL compile failed", gl.getShaderInfoLog(shader));
throw new Error("filter: gl compile failed", gl.getShaderInfoLog(shader));
return shader;
};
this.uniform = {};
@ -3743,7 +3743,7 @@ function GLProgram(gl, vertexSource, fragmentSource) {
gl.attachShader(this.id, _fsh);
gl.linkProgram(this.id);
if (!gl.getProgramParameter(this.id, gl.LINK_STATUS))
throw new Error("Filter: GL link failed", gl.getProgramInfoLog(this.id));
throw new Error("filter: gl link failed", gl.getProgramInfoLog(this.id));
gl.useProgram(this.id);
_collect(vertexSource, "attribute", this.attribute);
for (const a in this.attribute)
@ -3772,7 +3772,7 @@ function GLImageFilter(params) {
const DRAW = { INTERMEDIATE: 1 };
const gl = _canvas.getContext("webgl");
if (!gl)
throw new Error("Filter: getContext() failed");
throw new Error("filter: context failed");
this.addFilter = function(name) {
const args = Array.prototype.slice.call(arguments, 1);
const filter = _filter[name];
@ -4446,15 +4446,15 @@ function canvas(width, height) {
function process2(input, config3) {
let tensor3;
if (!input)
throw new Error("Human: Input is missing");
throw new Error("input is missing");
if (!(input instanceof tfjs_esm_exports.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env.Canvas !== "undefined" && input instanceof env.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) {
throw new Error("Human: Input type is not recognized");
throw new Error("input type is not recognized");
}
if (input instanceof tfjs_esm_exports.Tensor) {
if (input.shape && input.shape.length === 4 && input.shape[0] === 1 && input.shape[3] === 3)
tensor3 = tfjs_esm_exports.clone(input);
else
throw new Error(`Human: Input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
} else {
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
log("input stream is not ready");
@ -4485,7 +4485,7 @@ function process2(input, config3) {
else if ((config3.filter.width || 0) > 0)
targetHeight = originalHeight * ((config3.filter.width || 0) / originalWidth);
if (!targetWidth || !targetHeight)
throw new Error("Human: Input cannot determine dimension");
throw new Error("input cannot determine dimension");
if (!inCanvas || (inCanvas == null ? void 0 : inCanvas.width) !== targetWidth || (inCanvas == null ? void 0 : inCanvas.height) !== targetHeight)
inCanvas = canvas(targetWidth, targetHeight);
const ctx = inCanvas.getContext("2d");
@ -4559,7 +4559,11 @@ function process2(input, config3) {
tempCanvas.height = targetHeight;
const tempCtx = tempCanvas.getContext("2d");
tempCtx == null ? void 0 : tempCtx.drawImage(outCanvas, 0, 0);
try {
pixels = tfjs_esm_exports.browser && env.browser ? tfjs_esm_exports.browser.fromPixels(tempCanvas) : null;
} catch (err) {
throw new Error("browser webgl error");
}
} else {
const tempCanvas = canvas(targetWidth, targetHeight);
if (!tempCanvas)
@ -4590,7 +4594,7 @@ function process2(input, config3) {
tfjs_esm_exports.dispose(casted);
} else {
tensor3 = tfjs_esm_exports.zeros([1, targetWidth, targetHeight, 3]);
throw new Error("Human: Cannot create tensor from input");
throw new Error("cannot create tensor from input");
}
}
}
@ -4655,7 +4659,7 @@ async function backendInfo() {
env.backends = Object.keys(tfjs_esm_exports.engine().registryFactory);
env.wasm.supported = typeof WebAssembly !== "undefined";
env.wasm.backend = env.backends.includes("wasm");
if (env.wasm.supported && env.wasm.backend) {
if (env.wasm.supported && env.wasm.backend && tfjs_esm_exports.getBackend() === "wasm") {
env.wasm.simd = await tfjs_esm_exports.env().getAsync("WASM_HAS_SIMD_SUPPORT");
env.wasm.multithread = await tfjs_esm_exports.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
}
@ -4663,8 +4667,8 @@ async function backendInfo() {
const ctx = c ? c.getContext("webgl2") : void 0;
env.webgl.supported = typeof ctx !== "undefined";
env.webgl.backend = env.backends.includes("webgl");
if (env.webgl.supported && env.webgl.backend) {
const gl = tfjs_esm_exports.backend().gpgpu !== "undefined" && tfjs_esm_exports.backend().getGPGPUContext ? await tfjs_esm_exports.backend().getGPGPUContext().gl : null;
if (env.webgl.supported && env.webgl.backend && (tfjs_esm_exports.getBackend() === "webgl" || tfjs_esm_exports.getBackend() === "humangl")) {
const gl = tfjs_esm_exports.backend().gpgpu !== "undefined" ? await tfjs_esm_exports.backend().getGPGPUContext().gl : null;
if (gl) {
env.webgl.version = gl.getParameter(gl.VERSION);
env.webgl.renderer = gl.getParameter(gl.RENDERER);
@ -9800,7 +9804,7 @@ async function load10(config3) {
const inputs = Object.values(model7.modelSignature["inputs"]);
model7.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null;
if (!model7.inputSize)
throw new Error(`Human: Cannot determine model inputSize: ${config3.object.modelPath}`);
throw new Error(`cannot determine model inputSize: ${config3.object.modelPath}`);
if (!model7 || !model7.modelUrl)
log("load model failed:", config3.object.modelPath);
else if (config3.debug)
@ -10534,7 +10538,7 @@ var options2 = {
var getCanvasContext = (input) => {
if (input && input.getContext)
return input.getContext("2d");
throw new Error("Human: Invalid Canvas");
throw new Error("invalid canvas");
};
var rad2deg = (theta) => Math.round(theta * 180 / Math.PI);
function point(ctx, x, y, z = 0, localOptions) {
@ -11176,10 +11180,7 @@ async function register(instance) {
var _a;
if (config2.name in tfjs_esm_exports.engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) {
log("error: humangl backend invalid context");
log("resetting humangl backend");
reset(instance);
await tfjs_esm_exports.removeBackend(config2.name);
await register(instance);
}
if (!tfjs_esm_exports.findBackend(config2.name)) {
try {
@ -11192,14 +11193,10 @@ async function register(instance) {
config2.gl = (_a = config2.canvas) == null ? void 0 : _a.getContext("webgl2", config2.webGLattr);
if (config2.canvas) {
config2.canvas.addEventListener("webglcontextlost", async (e) => {
var _a2;
const err = (_a2 = config2.gl) == null ? void 0 : _a2.getError();
log("error: humangl context lost:", err, e);
log("gpu memory usage:", instance.tf.engine().backendInstance.numBytesInGPU);
log("resetting humangl backend");
env.initial = true;
reset(instance);
await tfjs_esm_exports.removeBackend(config2.name);
log("error: humangl:", e.type);
log("possible browser memory leak using webgl");
instance.emit("error");
throw new Error("browser webgl error");
});
config2.canvas.addEventListener("webglcontextrestored", (e) => {
log("error: humangl context restored:", e);
@ -11301,7 +11298,7 @@ async function check(instance) {
if (typeof (tfjs_esm_exports == null ? void 0 : tfjs_esm_exports.setWasmPaths) !== "undefined")
await tfjs_esm_exports.setWasmPaths(instance.config.wasmPath);
else
throw new Error("Human: WASM backend is not loaded");
throw new Error("wasm backend is not loaded");
const simd = await tfjs_esm_exports.env().getAsync("WASM_HAS_SIMD_SUPPORT");
const mt = await tfjs_esm_exports.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
if (instance.config.debug)
@ -11309,15 +11306,14 @@ async function check(instance) {
if (instance.config.debug && !simd)
log("warning: wasm simd support is not enabled");
}
await tfjs_esm_exports.setBackend(instance.config.backend);
try {
await tfjs_esm_exports.setBackend(instance.config.backend);
await tfjs_esm_exports.ready();
} catch (err) {
log("error: cannot set backend:", instance.config.backend, err);
return false;
}
}
tfjs_esm_exports.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", 0);
if (tfjs_esm_exports.getBackend() === "humangl") {
tfjs_esm_exports.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
tfjs_esm_exports.ENV.set("WEBGL_CPU_FORWARD", true);
@ -11338,6 +11334,7 @@ async function check(instance) {
get();
instance.env = env;
}
return true;
}
// package.json
@ -12279,6 +12276,9 @@ var Human = class {
match(faceEmbedding, db, threshold = 0) {
return match(faceEmbedding, db, threshold);
}
init() {
check(this);
}
async load(userConfig) {
this.state = "load";
const timeStamp = now();
@ -12290,7 +12290,8 @@ var Human = class {
log(`version: ${this.version}`);
if (this.config.debug)
log(`tfjs version: ${this.tf.version_core}`);
await check(this);
if (!await check(this))
log("error: backend check failed");
await tfjs_esm_exports.ready();
if (this.env.browser) {
if (this.config.debug)

File diff suppressed because one or more lines are too long

59
dist/human.esm.js vendored
View File

@ -44,7 +44,7 @@ function join(folder, file) {
const skipJoin = file.startsWith(".") || file.startsWith("/") || file.startsWith("http:") || file.startsWith("https:") || file.startsWith("file:");
const path = skipJoin ? `${file}` : `${folder}${separator}${file}`;
if (!path.toLocaleLowerCase().includes(".json"))
throw new Error(`Human: ModelPath Error: ${path} Expecting JSON file`);
throw new Error(`modelpath error: ${path} expecting json file`);
return path;
}
function log(...msg) {
@ -63788,7 +63788,7 @@ function GLProgram(gl, vertexSource, fragmentSource) {
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS))
throw new Error("Filter: GL compile failed", gl.getShaderInfoLog(shader));
throw new Error("filter: gl compile failed", gl.getShaderInfoLog(shader));
return shader;
};
this.uniform = {};
@ -63800,7 +63800,7 @@ function GLProgram(gl, vertexSource, fragmentSource) {
gl.attachShader(this.id, _fsh);
gl.linkProgram(this.id);
if (!gl.getProgramParameter(this.id, gl.LINK_STATUS))
throw new Error("Filter: GL link failed", gl.getProgramInfoLog(this.id));
throw new Error("filter: gl link failed", gl.getProgramInfoLog(this.id));
gl.useProgram(this.id);
_collect(vertexSource, "attribute", this.attribute);
for (const a in this.attribute)
@ -63829,7 +63829,7 @@ function GLImageFilter(params) {
const DRAW = { INTERMEDIATE: 1 };
const gl = _canvas.getContext("webgl");
if (!gl)
throw new Error("Filter: getContext() failed");
throw new Error("filter: context failed");
this.addFilter = function(name) {
const args = Array.prototype.slice.call(arguments, 1);
const filter = _filter[name];
@ -64503,15 +64503,15 @@ function canvas(width, height) {
function process2(input2, config3) {
let tensor2;
if (!input2)
throw new Error("Human: Input is missing");
throw new Error("input is missing");
if (!(input2 instanceof Tensor) && !(typeof Image !== "undefined" && input2 instanceof Image) && !(typeof env2.Canvas !== "undefined" && input2 instanceof env2.Canvas) && !(typeof ImageData !== "undefined" && input2 instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input2 instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input2 instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input2 instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input2 instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input2 instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input2 instanceof OffscreenCanvas)) {
throw new Error("Human: Input type is not recognized");
throw new Error("input type is not recognized");
}
if (input2 instanceof Tensor) {
if (input2.shape && input2.shape.length === 4 && input2.shape[0] === 1 && input2.shape[3] === 3)
tensor2 = clone(input2);
else
throw new Error(`Human: Input tensor shape must be [1, height, width, 3] and instead was ${input2.shape}`);
throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input2.shape}`);
} else {
if (typeof input2["readyState"] !== "undefined" && input2["readyState"] <= 2) {
log("input stream is not ready");
@ -64542,7 +64542,7 @@ function process2(input2, config3) {
else if ((config3.filter.width || 0) > 0)
targetHeight = originalHeight * ((config3.filter.width || 0) / originalWidth);
if (!targetWidth || !targetHeight)
throw new Error("Human: Input cannot determine dimension");
throw new Error("input cannot determine dimension");
if (!inCanvas || (inCanvas == null ? void 0 : inCanvas.width) !== targetWidth || (inCanvas == null ? void 0 : inCanvas.height) !== targetHeight)
inCanvas = canvas(targetWidth, targetHeight);
const ctx = inCanvas.getContext("2d");
@ -64616,7 +64616,11 @@ function process2(input2, config3) {
tempCanvas.height = targetHeight;
const tempCtx = tempCanvas.getContext("2d");
tempCtx == null ? void 0 : tempCtx.drawImage(outCanvas, 0, 0);
try {
pixels = browser_exports && env2.browser ? browser_exports.fromPixels(tempCanvas) : null;
} catch (err) {
throw new Error("browser webgl error");
}
} else {
const tempCanvas = canvas(targetWidth, targetHeight);
if (!tempCanvas)
@ -64647,7 +64651,7 @@ function process2(input2, config3) {
dispose(casted);
} else {
tensor2 = zeros([1, targetWidth, targetHeight, 3]);
throw new Error("Human: Cannot create tensor from input");
throw new Error("cannot create tensor from input");
}
}
}
@ -64712,7 +64716,7 @@ async function backendInfo() {
env2.backends = Object.keys(engine().registryFactory);
env2.wasm.supported = typeof WebAssembly !== "undefined";
env2.wasm.backend = env2.backends.includes("wasm");
if (env2.wasm.supported && env2.wasm.backend) {
if (env2.wasm.supported && env2.wasm.backend && getBackend() === "wasm") {
env2.wasm.simd = await env().getAsync("WASM_HAS_SIMD_SUPPORT");
env2.wasm.multithread = await env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
}
@ -64720,8 +64724,8 @@ async function backendInfo() {
const ctx = c ? c.getContext("webgl2") : void 0;
env2.webgl.supported = typeof ctx !== "undefined";
env2.webgl.backend = env2.backends.includes("webgl");
if (env2.webgl.supported && env2.webgl.backend) {
const gl = backend().gpgpu !== "undefined" && backend().getGPGPUContext ? await backend().getGPGPUContext().gl : null;
if (env2.webgl.supported && env2.webgl.backend && (getBackend() === "webgl" || getBackend() === "humangl")) {
const gl = backend().gpgpu !== "undefined" ? await backend().getGPGPUContext().gl : null;
if (gl) {
env2.webgl.version = gl.getParameter(gl.VERSION);
env2.webgl.renderer = gl.getParameter(gl.RENDERER);
@ -69857,7 +69861,7 @@ async function load10(config3) {
const inputs = Object.values(model8.modelSignature["inputs"]);
model8.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null;
if (!model8.inputSize)
throw new Error(`Human: Cannot determine model inputSize: ${config3.object.modelPath}`);
throw new Error(`cannot determine model inputSize: ${config3.object.modelPath}`);
if (!model8 || !model8.modelUrl)
log("load model failed:", config3.object.modelPath);
else if (config3.debug)
@ -70591,7 +70595,7 @@ var options2 = {
var getCanvasContext = (input2) => {
if (input2 && input2.getContext)
return input2.getContext("2d");
throw new Error("Human: Invalid Canvas");
throw new Error("invalid canvas");
};
var rad2deg = (theta) => Math.round(theta * 180 / Math.PI);
function point(ctx, x, y, z = 0, localOptions) {
@ -71233,10 +71237,7 @@ async function register(instance) {
var _a;
if (config2.name in engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) {
log("error: humangl backend invalid context");
log("resetting humangl backend");
reset(instance);
await removeBackend(config2.name);
await register(instance);
}
if (!findBackend(config2.name)) {
try {
@ -71249,14 +71250,10 @@ async function register(instance) {
config2.gl = (_a = config2.canvas) == null ? void 0 : _a.getContext("webgl2", config2.webGLattr);
if (config2.canvas) {
config2.canvas.addEventListener("webglcontextlost", async (e) => {
var _a2;
const err = (_a2 = config2.gl) == null ? void 0 : _a2.getError();
log("error: humangl context lost:", err, e);
log("gpu memory usage:", instance.tf.engine().backendInstance.numBytesInGPU);
log("resetting humangl backend");
env2.initial = true;
reset(instance);
await removeBackend(config2.name);
log("error: humangl:", e.type);
log("possible browser memory leak using webgl");
instance.emit("error");
throw new Error("browser webgl error");
});
config2.canvas.addEventListener("webglcontextrestored", (e) => {
log("error: humangl context restored:", e);
@ -71358,7 +71355,7 @@ async function check(instance) {
if (typeof (tfjs_esm_exports == null ? void 0 : tfjs_esm_exports.setWasmPaths) !== "undefined")
await setWasmPaths(instance.config.wasmPath);
else
throw new Error("Human: WASM backend is not loaded");
throw new Error("wasm backend is not loaded");
const simd = await env().getAsync("WASM_HAS_SIMD_SUPPORT");
const mt = await env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
if (instance.config.debug)
@ -71366,15 +71363,14 @@ async function check(instance) {
if (instance.config.debug && !simd)
log("warning: wasm simd support is not enabled");
}
await setBackend(instance.config.backend);
try {
await setBackend(instance.config.backend);
await ready();
} catch (err) {
log("error: cannot set backend:", instance.config.backend, err);
return false;
}
}
ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", 0);
if (getBackend() === "humangl") {
ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
ENV.set("WEBGL_CPU_FORWARD", true);
@ -71395,6 +71391,7 @@ async function check(instance) {
get3();
instance.env = env2;
}
return true;
}
// package.json
@ -72336,6 +72333,9 @@ var Human = class {
match(faceEmbedding, db, threshold3 = 0) {
return match(faceEmbedding, db, threshold3);
}
init() {
check(this);
}
async load(userConfig) {
this.state = "load";
const timeStamp = now();
@ -72347,7 +72347,8 @@ var Human = class {
log(`version: ${this.version}`);
if (this.config.debug)
log(`tfjs version: ${this.tf.version_core}`);
await check(this);
if (!await check(this))
log("error: backend check failed");
await ready();
if (this.env.browser) {
if (this.config.debug)

File diff suppressed because one or more lines are too long

320
dist/human.js vendored

File diff suppressed because one or more lines are too long

View File

@ -100,7 +100,7 @@ function join(folder, file) {
const skipJoin = file.startsWith(".") || file.startsWith("/") || file.startsWith("http:") || file.startsWith("https:") || file.startsWith("file:");
const path = skipJoin ? `${file}` : `${folder}${separator}${file}`;
if (!path.toLocaleLowerCase().includes(".json"))
throw new Error(`Human: ModelPath Error: ${path} Expecting JSON file`);
throw new Error(`modelpath error: ${path} expecting json file`);
return path;
}
function log(...msg) {
@ -3765,7 +3765,7 @@ function GLProgram(gl, vertexSource, fragmentSource) {
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS))
throw new Error("Filter: GL compile failed", gl.getShaderInfoLog(shader));
throw new Error("filter: gl compile failed", gl.getShaderInfoLog(shader));
return shader;
};
this.uniform = {};
@ -3777,7 +3777,7 @@ function GLProgram(gl, vertexSource, fragmentSource) {
gl.attachShader(this.id, _fsh);
gl.linkProgram(this.id);
if (!gl.getProgramParameter(this.id, gl.LINK_STATUS))
throw new Error("Filter: GL link failed", gl.getProgramInfoLog(this.id));
throw new Error("filter: gl link failed", gl.getProgramInfoLog(this.id));
gl.useProgram(this.id);
_collect(vertexSource, "attribute", this.attribute);
for (const a in this.attribute)
@ -3806,7 +3806,7 @@ function GLImageFilter(params) {
const DRAW = { INTERMEDIATE: 1 };
const gl = _canvas.getContext("webgl");
if (!gl)
throw new Error("Filter: getContext() failed");
throw new Error("filter: context failed");
this.addFilter = function(name) {
const args = Array.prototype.slice.call(arguments, 1);
const filter = _filter[name];
@ -4480,15 +4480,15 @@ function canvas(width, height) {
function process2(input, config3) {
let tensor3;
if (!input)
throw new Error("Human: Input is missing");
throw new Error("input is missing");
if (!(input instanceof tf3.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env.Canvas !== "undefined" && input instanceof env.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) {
throw new Error("Human: Input type is not recognized");
throw new Error("input type is not recognized");
}
if (input instanceof tf3.Tensor) {
if (input.shape && input.shape.length === 4 && input.shape[0] === 1 && input.shape[3] === 3)
tensor3 = tf3.clone(input);
else
throw new Error(`Human: Input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
} else {
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
log("input stream is not ready");
@ -4519,7 +4519,7 @@ function process2(input, config3) {
else if ((config3.filter.width || 0) > 0)
targetHeight = originalHeight * ((config3.filter.width || 0) / originalWidth);
if (!targetWidth || !targetHeight)
throw new Error("Human: Input cannot determine dimension");
throw new Error("input cannot determine dimension");
if (!inCanvas || (inCanvas == null ? void 0 : inCanvas.width) !== targetWidth || (inCanvas == null ? void 0 : inCanvas.height) !== targetHeight)
inCanvas = canvas(targetWidth, targetHeight);
const ctx = inCanvas.getContext("2d");
@ -4593,7 +4593,11 @@ function process2(input, config3) {
tempCanvas.height = targetHeight;
const tempCtx = tempCanvas.getContext("2d");
tempCtx == null ? void 0 : tempCtx.drawImage(outCanvas, 0, 0);
try {
pixels = tf3.browser && env.browser ? tf3.browser.fromPixels(tempCanvas) : null;
} catch (err) {
throw new Error("browser webgl error");
}
} else {
const tempCanvas = canvas(targetWidth, targetHeight);
if (!tempCanvas)
@ -4624,7 +4628,7 @@ function process2(input, config3) {
tf3.dispose(casted);
} else {
tensor3 = tf3.zeros([1, targetWidth, targetHeight, 3]);
throw new Error("Human: Cannot create tensor from input");
throw new Error("cannot create tensor from input");
}
}
}
@ -4689,7 +4693,7 @@ async function backendInfo() {
env.backends = Object.keys(tf4.engine().registryFactory);
env.wasm.supported = typeof WebAssembly !== "undefined";
env.wasm.backend = env.backends.includes("wasm");
if (env.wasm.supported && env.wasm.backend) {
if (env.wasm.supported && env.wasm.backend && tf4.getBackend() === "wasm") {
env.wasm.simd = await tf4.env().getAsync("WASM_HAS_SIMD_SUPPORT");
env.wasm.multithread = await tf4.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
}
@ -4697,8 +4701,8 @@ async function backendInfo() {
const ctx = c ? c.getContext("webgl2") : void 0;
env.webgl.supported = typeof ctx !== "undefined";
env.webgl.backend = env.backends.includes("webgl");
if (env.webgl.supported && env.webgl.backend) {
const gl = tf4.backend().gpgpu !== "undefined" && tf4.backend().getGPGPUContext ? await tf4.backend().getGPGPUContext().gl : null;
if (env.webgl.supported && env.webgl.backend && (tf4.getBackend() === "webgl" || tf4.getBackend() === "humangl")) {
const gl = tf4.backend().gpgpu !== "undefined" ? await tf4.backend().getGPGPUContext().gl : null;
if (gl) {
env.webgl.version = gl.getParameter(gl.VERSION);
env.webgl.renderer = gl.getParameter(gl.RENDERER);
@ -9857,7 +9861,7 @@ async function load10(config3) {
const inputs = Object.values(model7.modelSignature["inputs"]);
model7.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null;
if (!model7.inputSize)
throw new Error(`Human: Cannot determine model inputSize: ${config3.object.modelPath}`);
throw new Error(`cannot determine model inputSize: ${config3.object.modelPath}`);
if (!model7 || !model7.modelUrl)
log("load model failed:", config3.object.modelPath);
else if (config3.debug)
@ -10594,7 +10598,7 @@ var options2 = {
var getCanvasContext = (input) => {
if (input && input.getContext)
return input.getContext("2d");
throw new Error("Human: Invalid Canvas");
throw new Error("invalid canvas");
};
var rad2deg = (theta) => Math.round(theta * 180 / Math.PI);
function point(ctx, x, y, z = 0, localOptions) {
@ -11237,10 +11241,7 @@ async function register(instance) {
var _a;
if (config2.name in tf21.engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) {
log("error: humangl backend invalid context");
log("resetting humangl backend");
reset(instance);
await tf21.removeBackend(config2.name);
await register(instance);
}
if (!tf21.findBackend(config2.name)) {
try {
@ -11253,14 +11254,10 @@ async function register(instance) {
config2.gl = (_a = config2.canvas) == null ? void 0 : _a.getContext("webgl2", config2.webGLattr);
if (config2.canvas) {
config2.canvas.addEventListener("webglcontextlost", async (e) => {
var _a2;
const err = (_a2 = config2.gl) == null ? void 0 : _a2.getError();
log("error: humangl context lost:", err, e);
log("gpu memory usage:", instance.tf.engine().backendInstance.numBytesInGPU);
log("resetting humangl backend");
env.initial = true;
reset(instance);
await tf21.removeBackend(config2.name);
log("error: humangl:", e.type);
log("possible browser memory leak using webgl");
instance.emit("error");
throw new Error("browser webgl error");
});
config2.canvas.addEventListener("webglcontextrestored", (e) => {
log("error: humangl context restored:", e);
@ -11363,7 +11360,7 @@ async function check(instance) {
if (typeof (tf22 == null ? void 0 : tf22.setWasmPaths) !== "undefined")
await tf22.setWasmPaths(instance.config.wasmPath);
else
throw new Error("Human: WASM backend is not loaded");
throw new Error("wasm backend is not loaded");
const simd = await tf22.env().getAsync("WASM_HAS_SIMD_SUPPORT");
const mt = await tf22.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
if (instance.config.debug)
@ -11371,15 +11368,14 @@ async function check(instance) {
if (instance.config.debug && !simd)
log("warning: wasm simd support is not enabled");
}
await tf22.setBackend(instance.config.backend);
try {
await tf22.setBackend(instance.config.backend);
await tf22.ready();
} catch (err) {
log("error: cannot set backend:", instance.config.backend, err);
return false;
}
}
tf22.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", 0);
if (tf22.getBackend() === "humangl") {
tf22.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
tf22.ENV.set("WEBGL_CPU_FORWARD", true);
@ -11400,6 +11396,7 @@ async function check(instance) {
get();
instance.env = env;
}
return true;
}
// package.json
@ -12342,6 +12339,9 @@ var Human = class {
match(faceEmbedding, db, threshold = 0) {
return match(faceEmbedding, db, threshold);
}
init() {
check(this);
}
async load(userConfig) {
this.state = "load";
const timeStamp = now();
@ -12353,7 +12353,8 @@ var Human = class {
log(`version: ${this.version}`);
if (this.config.debug)
log(`tfjs version: ${this.tf.version_core}`);
await check(this);
if (!await check(this))
log("error: backend check failed");
await tf24.ready();
if (this.env.browser) {
if (this.config.debug)

View File

@ -101,7 +101,7 @@ function join(folder, file) {
const skipJoin = file.startsWith(".") || file.startsWith("/") || file.startsWith("http:") || file.startsWith("https:") || file.startsWith("file:");
const path = skipJoin ? `${file}` : `${folder}${separator}${file}`;
if (!path.toLocaleLowerCase().includes(".json"))
throw new Error(`Human: ModelPath Error: ${path} Expecting JSON file`);
throw new Error(`modelpath error: ${path} expecting json file`);
return path;
}
function log(...msg) {
@ -3766,7 +3766,7 @@ function GLProgram(gl, vertexSource, fragmentSource) {
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS))
throw new Error("Filter: GL compile failed", gl.getShaderInfoLog(shader));
throw new Error("filter: gl compile failed", gl.getShaderInfoLog(shader));
return shader;
};
this.uniform = {};
@ -3778,7 +3778,7 @@ function GLProgram(gl, vertexSource, fragmentSource) {
gl.attachShader(this.id, _fsh);
gl.linkProgram(this.id);
if (!gl.getProgramParameter(this.id, gl.LINK_STATUS))
throw new Error("Filter: GL link failed", gl.getProgramInfoLog(this.id));
throw new Error("filter: gl link failed", gl.getProgramInfoLog(this.id));
gl.useProgram(this.id);
_collect(vertexSource, "attribute", this.attribute);
for (const a in this.attribute)
@ -3807,7 +3807,7 @@ function GLImageFilter(params) {
const DRAW = { INTERMEDIATE: 1 };
const gl = _canvas.getContext("webgl");
if (!gl)
throw new Error("Filter: getContext() failed");
throw new Error("filter: context failed");
this.addFilter = function(name) {
const args = Array.prototype.slice.call(arguments, 1);
const filter = _filter[name];
@ -4481,15 +4481,15 @@ function canvas(width, height) {
function process2(input, config3) {
let tensor3;
if (!input)
throw new Error("Human: Input is missing");
throw new Error("input is missing");
if (!(input instanceof tf3.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env.Canvas !== "undefined" && input instanceof env.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) {
throw new Error("Human: Input type is not recognized");
throw new Error("input type is not recognized");
}
if (input instanceof tf3.Tensor) {
if (input.shape && input.shape.length === 4 && input.shape[0] === 1 && input.shape[3] === 3)
tensor3 = tf3.clone(input);
else
throw new Error(`Human: Input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
} else {
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
log("input stream is not ready");
@ -4520,7 +4520,7 @@ function process2(input, config3) {
else if ((config3.filter.width || 0) > 0)
targetHeight = originalHeight * ((config3.filter.width || 0) / originalWidth);
if (!targetWidth || !targetHeight)
throw new Error("Human: Input cannot determine dimension");
throw new Error("input cannot determine dimension");
if (!inCanvas || (inCanvas == null ? void 0 : inCanvas.width) !== targetWidth || (inCanvas == null ? void 0 : inCanvas.height) !== targetHeight)
inCanvas = canvas(targetWidth, targetHeight);
const ctx = inCanvas.getContext("2d");
@ -4594,7 +4594,11 @@ function process2(input, config3) {
tempCanvas.height = targetHeight;
const tempCtx = tempCanvas.getContext("2d");
tempCtx == null ? void 0 : tempCtx.drawImage(outCanvas, 0, 0);
try {
pixels = tf3.browser && env.browser ? tf3.browser.fromPixels(tempCanvas) : null;
} catch (err) {
throw new Error("browser webgl error");
}
} else {
const tempCanvas = canvas(targetWidth, targetHeight);
if (!tempCanvas)
@ -4625,7 +4629,7 @@ function process2(input, config3) {
tf3.dispose(casted);
} else {
tensor3 = tf3.zeros([1, targetWidth, targetHeight, 3]);
throw new Error("Human: Cannot create tensor from input");
throw new Error("cannot create tensor from input");
}
}
}
@ -4690,7 +4694,7 @@ async function backendInfo() {
env.backends = Object.keys(tf4.engine().registryFactory);
env.wasm.supported = typeof WebAssembly !== "undefined";
env.wasm.backend = env.backends.includes("wasm");
if (env.wasm.supported && env.wasm.backend) {
if (env.wasm.supported && env.wasm.backend && tf4.getBackend() === "wasm") {
env.wasm.simd = await tf4.env().getAsync("WASM_HAS_SIMD_SUPPORT");
env.wasm.multithread = await tf4.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
}
@ -4698,8 +4702,8 @@ async function backendInfo() {
const ctx = c ? c.getContext("webgl2") : void 0;
env.webgl.supported = typeof ctx !== "undefined";
env.webgl.backend = env.backends.includes("webgl");
if (env.webgl.supported && env.webgl.backend) {
const gl = tf4.backend().gpgpu !== "undefined" && tf4.backend().getGPGPUContext ? await tf4.backend().getGPGPUContext().gl : null;
if (env.webgl.supported && env.webgl.backend && (tf4.getBackend() === "webgl" || tf4.getBackend() === "humangl")) {
const gl = tf4.backend().gpgpu !== "undefined" ? await tf4.backend().getGPGPUContext().gl : null;
if (gl) {
env.webgl.version = gl.getParameter(gl.VERSION);
env.webgl.renderer = gl.getParameter(gl.RENDERER);
@ -9858,7 +9862,7 @@ async function load10(config3) {
const inputs = Object.values(model7.modelSignature["inputs"]);
model7.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null;
if (!model7.inputSize)
throw new Error(`Human: Cannot determine model inputSize: ${config3.object.modelPath}`);
throw new Error(`cannot determine model inputSize: ${config3.object.modelPath}`);
if (!model7 || !model7.modelUrl)
log("load model failed:", config3.object.modelPath);
else if (config3.debug)
@ -10595,7 +10599,7 @@ var options2 = {
var getCanvasContext = (input) => {
if (input && input.getContext)
return input.getContext("2d");
throw new Error("Human: Invalid Canvas");
throw new Error("invalid canvas");
};
var rad2deg = (theta) => Math.round(theta * 180 / Math.PI);
function point(ctx, x, y, z = 0, localOptions) {
@ -11238,10 +11242,7 @@ async function register(instance) {
var _a;
if (config2.name in tf21.engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) {
log("error: humangl backend invalid context");
log("resetting humangl backend");
reset(instance);
await tf21.removeBackend(config2.name);
await register(instance);
}
if (!tf21.findBackend(config2.name)) {
try {
@ -11254,14 +11255,10 @@ async function register(instance) {
config2.gl = (_a = config2.canvas) == null ? void 0 : _a.getContext("webgl2", config2.webGLattr);
if (config2.canvas) {
config2.canvas.addEventListener("webglcontextlost", async (e) => {
var _a2;
const err = (_a2 = config2.gl) == null ? void 0 : _a2.getError();
log("error: humangl context lost:", err, e);
log("gpu memory usage:", instance.tf.engine().backendInstance.numBytesInGPU);
log("resetting humangl backend");
env.initial = true;
reset(instance);
await tf21.removeBackend(config2.name);
log("error: humangl:", e.type);
log("possible browser memory leak using webgl");
instance.emit("error");
throw new Error("browser webgl error");
});
config2.canvas.addEventListener("webglcontextrestored", (e) => {
log("error: humangl context restored:", e);
@ -11364,7 +11361,7 @@ async function check(instance) {
if (typeof (tf22 == null ? void 0 : tf22.setWasmPaths) !== "undefined")
await tf22.setWasmPaths(instance.config.wasmPath);
else
throw new Error("Human: WASM backend is not loaded");
throw new Error("wasm backend is not loaded");
const simd = await tf22.env().getAsync("WASM_HAS_SIMD_SUPPORT");
const mt = await tf22.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
if (instance.config.debug)
@ -11372,15 +11369,14 @@ async function check(instance) {
if (instance.config.debug && !simd)
log("warning: wasm simd support is not enabled");
}
await tf22.setBackend(instance.config.backend);
try {
await tf22.setBackend(instance.config.backend);
await tf22.ready();
} catch (err) {
log("error: cannot set backend:", instance.config.backend, err);
return false;
}
}
tf22.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", 0);
if (tf22.getBackend() === "humangl") {
tf22.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
tf22.ENV.set("WEBGL_CPU_FORWARD", true);
@ -11401,6 +11397,7 @@ async function check(instance) {
get();
instance.env = env;
}
return true;
}
// package.json
@ -12343,6 +12340,9 @@ var Human = class {
match(faceEmbedding, db, threshold = 0) {
return match(faceEmbedding, db, threshold);
}
init() {
check(this);
}
async load(userConfig) {
this.state = "load";
const timeStamp = now();
@ -12354,7 +12354,8 @@ var Human = class {
log(`version: ${this.version}`);
if (this.config.debug)
log(`tfjs version: ${this.tf.version_core}`);
await check(this);
if (!await check(this))
log("error: backend check failed");
await tf24.ready();
if (this.env.browser) {
if (this.config.debug)

59
dist/human.node.js vendored
View File

@ -100,7 +100,7 @@ function join(folder, file) {
const skipJoin = file.startsWith(".") || file.startsWith("/") || file.startsWith("http:") || file.startsWith("https:") || file.startsWith("file:");
const path = skipJoin ? `${file}` : `${folder}${separator}${file}`;
if (!path.toLocaleLowerCase().includes(".json"))
throw new Error(`Human: ModelPath Error: ${path} Expecting JSON file`);
throw new Error(`modelpath error: ${path} expecting json file`);
return path;
}
function log(...msg) {
@ -3765,7 +3765,7 @@ function GLProgram(gl, vertexSource, fragmentSource) {
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS))
throw new Error("Filter: GL compile failed", gl.getShaderInfoLog(shader));
throw new Error("filter: gl compile failed", gl.getShaderInfoLog(shader));
return shader;
};
this.uniform = {};
@ -3777,7 +3777,7 @@ function GLProgram(gl, vertexSource, fragmentSource) {
gl.attachShader(this.id, _fsh);
gl.linkProgram(this.id);
if (!gl.getProgramParameter(this.id, gl.LINK_STATUS))
throw new Error("Filter: GL link failed", gl.getProgramInfoLog(this.id));
throw new Error("filter: gl link failed", gl.getProgramInfoLog(this.id));
gl.useProgram(this.id);
_collect(vertexSource, "attribute", this.attribute);
for (const a in this.attribute)
@ -3806,7 +3806,7 @@ function GLImageFilter(params) {
const DRAW = { INTERMEDIATE: 1 };
const gl = _canvas.getContext("webgl");
if (!gl)
throw new Error("Filter: getContext() failed");
throw new Error("filter: context failed");
this.addFilter = function(name) {
const args = Array.prototype.slice.call(arguments, 1);
const filter = _filter[name];
@ -4480,15 +4480,15 @@ function canvas(width, height) {
function process2(input, config3) {
let tensor3;
if (!input)
throw new Error("Human: Input is missing");
throw new Error("input is missing");
if (!(input instanceof tf3.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env.Canvas !== "undefined" && input instanceof env.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) {
throw new Error("Human: Input type is not recognized");
throw new Error("input type is not recognized");
}
if (input instanceof tf3.Tensor) {
if (input.shape && input.shape.length === 4 && input.shape[0] === 1 && input.shape[3] === 3)
tensor3 = tf3.clone(input);
else
throw new Error(`Human: Input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input.shape}`);
} else {
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
log("input stream is not ready");
@ -4519,7 +4519,7 @@ function process2(input, config3) {
else if ((config3.filter.width || 0) > 0)
targetHeight = originalHeight * ((config3.filter.width || 0) / originalWidth);
if (!targetWidth || !targetHeight)
throw new Error("Human: Input cannot determine dimension");
throw new Error("input cannot determine dimension");
if (!inCanvas || (inCanvas == null ? void 0 : inCanvas.width) !== targetWidth || (inCanvas == null ? void 0 : inCanvas.height) !== targetHeight)
inCanvas = canvas(targetWidth, targetHeight);
const ctx = inCanvas.getContext("2d");
@ -4593,7 +4593,11 @@ function process2(input, config3) {
tempCanvas.height = targetHeight;
const tempCtx = tempCanvas.getContext("2d");
tempCtx == null ? void 0 : tempCtx.drawImage(outCanvas, 0, 0);
try {
pixels = tf3.browser && env.browser ? tf3.browser.fromPixels(tempCanvas) : null;
} catch (err) {
throw new Error("browser webgl error");
}
} else {
const tempCanvas = canvas(targetWidth, targetHeight);
if (!tempCanvas)
@ -4624,7 +4628,7 @@ function process2(input, config3) {
tf3.dispose(casted);
} else {
tensor3 = tf3.zeros([1, targetWidth, targetHeight, 3]);
throw new Error("Human: Cannot create tensor from input");
throw new Error("cannot create tensor from input");
}
}
}
@ -4689,7 +4693,7 @@ async function backendInfo() {
env.backends = Object.keys(tf4.engine().registryFactory);
env.wasm.supported = typeof WebAssembly !== "undefined";
env.wasm.backend = env.backends.includes("wasm");
if (env.wasm.supported && env.wasm.backend) {
if (env.wasm.supported && env.wasm.backend && tf4.getBackend() === "wasm") {
env.wasm.simd = await tf4.env().getAsync("WASM_HAS_SIMD_SUPPORT");
env.wasm.multithread = await tf4.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
}
@ -4697,8 +4701,8 @@ async function backendInfo() {
const ctx = c ? c.getContext("webgl2") : void 0;
env.webgl.supported = typeof ctx !== "undefined";
env.webgl.backend = env.backends.includes("webgl");
if (env.webgl.supported && env.webgl.backend) {
const gl = tf4.backend().gpgpu !== "undefined" && tf4.backend().getGPGPUContext ? await tf4.backend().getGPGPUContext().gl : null;
if (env.webgl.supported && env.webgl.backend && (tf4.getBackend() === "webgl" || tf4.getBackend() === "humangl")) {
const gl = tf4.backend().gpgpu !== "undefined" ? await tf4.backend().getGPGPUContext().gl : null;
if (gl) {
env.webgl.version = gl.getParameter(gl.VERSION);
env.webgl.renderer = gl.getParameter(gl.RENDERER);
@ -9857,7 +9861,7 @@ async function load10(config3) {
const inputs = Object.values(model7.modelSignature["inputs"]);
model7.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null;
if (!model7.inputSize)
throw new Error(`Human: Cannot determine model inputSize: ${config3.object.modelPath}`);
throw new Error(`cannot determine model inputSize: ${config3.object.modelPath}`);
if (!model7 || !model7.modelUrl)
log("load model failed:", config3.object.modelPath);
else if (config3.debug)
@ -10594,7 +10598,7 @@ var options2 = {
var getCanvasContext = (input) => {
if (input && input.getContext)
return input.getContext("2d");
throw new Error("Human: Invalid Canvas");
throw new Error("invalid canvas");
};
var rad2deg = (theta) => Math.round(theta * 180 / Math.PI);
function point(ctx, x, y, z = 0, localOptions) {
@ -11237,10 +11241,7 @@ async function register(instance) {
var _a;
if (config2.name in tf21.engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) {
log("error: humangl backend invalid context");
log("resetting humangl backend");
reset(instance);
await tf21.removeBackend(config2.name);
await register(instance);
}
if (!tf21.findBackend(config2.name)) {
try {
@ -11253,14 +11254,10 @@ async function register(instance) {
config2.gl = (_a = config2.canvas) == null ? void 0 : _a.getContext("webgl2", config2.webGLattr);
if (config2.canvas) {
config2.canvas.addEventListener("webglcontextlost", async (e) => {
var _a2;
const err = (_a2 = config2.gl) == null ? void 0 : _a2.getError();
log("error: humangl context lost:", err, e);
log("gpu memory usage:", instance.tf.engine().backendInstance.numBytesInGPU);
log("resetting humangl backend");
env.initial = true;
reset(instance);
await tf21.removeBackend(config2.name);
log("error: humangl:", e.type);
log("possible browser memory leak using webgl");
instance.emit("error");
throw new Error("browser webgl error");
});
config2.canvas.addEventListener("webglcontextrestored", (e) => {
log("error: humangl context restored:", e);
@ -11363,7 +11360,7 @@ async function check(instance) {
if (typeof (tf22 == null ? void 0 : tf22.setWasmPaths) !== "undefined")
await tf22.setWasmPaths(instance.config.wasmPath);
else
throw new Error("Human: WASM backend is not loaded");
throw new Error("wasm backend is not loaded");
const simd = await tf22.env().getAsync("WASM_HAS_SIMD_SUPPORT");
const mt = await tf22.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
if (instance.config.debug)
@ -11371,15 +11368,14 @@ async function check(instance) {
if (instance.config.debug && !simd)
log("warning: wasm simd support is not enabled");
}
await tf22.setBackend(instance.config.backend);
try {
await tf22.setBackend(instance.config.backend);
await tf22.ready();
} catch (err) {
log("error: cannot set backend:", instance.config.backend, err);
return false;
}
}
tf22.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", 0);
if (tf22.getBackend() === "humangl") {
tf22.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
tf22.ENV.set("WEBGL_CPU_FORWARD", true);
@ -11400,6 +11396,7 @@ async function check(instance) {
get();
instance.env = env;
}
return true;
}
// package.json
@ -12342,6 +12339,9 @@ var Human = class {
match(faceEmbedding, db, threshold = 0) {
return match(faceEmbedding, db, threshold);
}
init() {
check(this);
}
async load(userConfig) {
this.state = "load";
const timeStamp = now();
@ -12353,7 +12353,8 @@ var Human = class {
log(`version: ${this.version}`);
if (this.config.debug)
log(`tfjs version: ${this.tf.version_core}`);
await check(this);
if (!await check(this))
log("error: backend check failed");
await tf24.ready();
if (this.env.browser) {
if (this.config.debug)

View File

@ -68,7 +68,7 @@ export const options: DrawOptions = {
const getCanvasContext = (input) => {
if (input && input.getContext) return input.getContext('2d');
throw new Error('Human: Invalid Canvas');
throw new Error('invalid canvas');
};
const rad2deg = (theta) => Math.round((theta * 180) / Math.PI);

View File

@ -93,18 +93,19 @@ export async function backendInfo() {
env.backends = Object.keys(tf.engine().registryFactory);
env.wasm.supported = typeof WebAssembly !== 'undefined';
env.wasm.backend = env.backends.includes('wasm');
if (env.wasm.supported && env.wasm.backend) {
if (env.wasm.supported && env.wasm.backend && tf.getBackend() === 'wasm') {
env.wasm.simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');
env.wasm.multithread = await tf.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT');
}
const c = image.canvas(100, 100);
const ctx = c ? c.getContext('webgl2') : undefined;
const ctx = c ? c.getContext('webgl2') : undefined; // causes too many gl contexts
// const ctx = typeof tf.backend().getGPGPUContext !== undefined ? tf.backend().getGPGPUContext : null;
env.webgl.supported = typeof ctx !== 'undefined';
env.webgl.backend = env.backends.includes('webgl');
if (env.webgl.supported && env.webgl.backend) {
if (env.webgl.supported && env.webgl.backend && (tf.getBackend() === 'webgl' || tf.getBackend() === 'humangl')) {
// @ts-ignore getGPGPUContext only exists on WebGL backend
const gl = (tf.backend().gpgpu !== 'undefined') && (tf.backend().getGPGPUContext) ? await tf.backend().getGPGPUContext().gl : null;
const gl = tf.backend().gpgpu !== 'undefined' ? await tf.backend().getGPGPUContext().gl : null;
if (gl) {
env.webgl.version = gl.getParameter(gl.VERSION);
env.webgl.renderer = gl.getParameter(gl.RENDERER);

View File

@ -7,7 +7,7 @@ export function join(folder: string, file: string): string {
const separator = folder.endsWith('/') ? '' : '/';
const skipJoin = file.startsWith('.') || file.startsWith('/') || file.startsWith('http:') || file.startsWith('https:') || file.startsWith('file:');
const path = skipJoin ? `${file}` : `${folder}${separator}${file}`;
if (!path.toLocaleLowerCase().includes('.json')) throw new Error(`Human: ModelPath Error: ${path} Expecting JSON file`);
if (!path.toLocaleLowerCase().includes('.json')) throw new Error(`modelpath error: ${path} expecting json file`);
return path;
}

View File

@ -43,13 +43,14 @@ export { env } from './env';
export type Input = Tensor | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
/** Events dispatched by `human.events`
*
* - `create`: triggered when Human object is instantiated
* - `load`: triggered when models are loaded (explicitly or on-demand)
* - `image`: triggered when input image is this.processed
* - `result`: triggered when detection is complete
* - `warmup`: triggered when warmup is complete
*/
export type Events = 'create' | 'load' | 'image' | 'result' | 'warmup';
export type Events = 'create' | 'load' | 'image' | 'result' | 'warmup' | 'error';
/** Error message
* @typedef Error Type
@ -61,8 +62,7 @@ export type Error = { error: string };
*/
export type TensorFlow = typeof tf;
/**
* **Human** library main class
/** **Human** library main class
*
* All methods and properties are available only as members of Human class
*
@ -71,6 +71,7 @@ export type TensorFlow = typeof tf;
* - Possible inputs: {@link Input}
*
* @param userConfig: {@link Config}
* @return instance
*/
export class Human {
/** Current version of Human library in *semver* format */
@ -95,14 +96,13 @@ export class Human {
/** currenty processed image tensor and canvas */
process: { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement | null };
/** @internal: Instance of TensorFlow/JS used by Human
/** Instance of TensorFlow/JS used by Human
* - Can be embedded or externally provided
* @internal
*/
tf: TensorFlow;
/**
* Object containing environment information used for diagnostics
*/
/** Object containing environment information used for diagnostics */
env: env.Env;
/** Draw helper classes that can draw detected objects on canvas using specified draw
@ -113,10 +113,11 @@ export class Human {
* - canvas: draw this.processed canvas which is a this.processed copy of the input
* - all: meta-function that performs: canvas, face, body, hand
*/
// draw: typeof draw;
draw: { canvas, face, body, hand, gesture, object, person, all, options: DrawOptions };
/** @internal: Currently loaded models */
/** Currently loaded models
* @internal
*/
models: {
face: [unknown, GraphModel | null, GraphModel | null] | null,
posenet: GraphModel | null,
@ -142,6 +143,7 @@ export class Human {
* - `image`: triggered when input image is this.processed
* - `result`: triggered when detection is complete
* - `warmup`: triggered when warmup is complete
* - `error`: triggered on some errors
*/
events: EventTarget;
/** Reference face triangualtion array of 468 points, used for triangle references between points */
@ -157,9 +159,11 @@ export class Human {
gl: Record<string, unknown>;
// definition end
/**
* Creates instance of Human library that is futher used for all operations
/** Constructor for **Human** library that is futher used for all operations
*
* @param userConfig: {@link Config}
*
* @return instance
*/
constructor(userConfig?: Partial<Config>) {
env.get();
@ -252,7 +256,8 @@ export class Human {
image = (input: Input) => image.process(input, this.config);
/** Simmilarity method calculates simmilarity between two provided face descriptors (face embeddings)
* - Calculation is based on normalized Minkowski distance between
* - Calculation is based on normalized Minkowski distance between two descriptors
* - Default is Euclidean distance which is Minkowski distance of 2nd order
*
* @param embedding1: face descriptor as array of numbers
* @param embedding2: face descriptor as array of numbers
@ -263,10 +268,9 @@ export class Human {
return faceres.similarity(embedding1, embedding2);
}
/**
* Segmentation method takes any input and returns this.processed canvas with body segmentation
* Optional parameter background is used to fill the background with specific input
* Segmentation is not triggered as part of detect this.process
/** Segmentation method takes any input and returns this.processed canvas with body segmentation
* - Optional parameter background is used to fill the background with specific input
* - Segmentation is not triggered as part of detect this.process
*
* @param input: {@link Input}
* @param background?: {@link Input}
@ -276,7 +280,8 @@ export class Human {
return input ? segmentation.process(input, background, this.config) : null;
}
/** Enhance method performs additional enhacements to face image previously detected for futher this.processing
/** Enhance method performs additional enhacements to face image previously detected for futher processing
*
* @param input: Tensor as provided in human.result.face[n].tensor
* @returns Tensor
*/
@ -286,6 +291,7 @@ export class Human {
}
/** Math method find best match between provided face descriptor and predefined database of known descriptors
*
* @param faceEmbedding: face descriptor previsouly calculated on any face
* @param db: array of mapping of face descriptors to known values
* @param threshold: minimum score for matching to be considered in the result
@ -296,9 +302,22 @@ export class Human {
return faceres.match(faceEmbedding, db, threshold);
}
/** Explicit backend initialization
* - Normally done implicitly during initial load phase
* - Call to explictly register and initialize TFJS backend without any other operations
* - Used in webworker environments where there can be multiple instances of Human and not all initialized
*
* @return Promise<void>
*/
init() {
backend.check(this);
}
/** Load method preloads all configured models on-demand
* - Not explicitly required as any required model is load implicitly on it's first run
*
* @param userConfig?: {@link Config}
* @return Promise<void>
*/
async load(userConfig?: Partial<Config>) {
this.state = 'load';
@ -309,7 +328,7 @@ export class Human {
if (env.env.initial) { // print version info on first run and check for correct backend setup
if (this.config.debug) log(`version: ${this.version}`);
if (this.config.debug) log(`tfjs version: ${this.tf.version_core}`);
await backend.check(this);
if (!await backend.check(this)) log('error: backend check failed');
await tf.ready();
if (this.env.browser) {
if (this.config.debug) log('configuration:', this.config);
@ -335,8 +354,7 @@ export class Human {
/** @hidden */
emit = (event: string) => this.events?.dispatchEvent(new Event(event));
/**
* Runs interpolation using last known result and returns smoothened result
/** Runs interpolation using last known result and returns smoothened result
* Interpolation is based on time since last known result so can be called independently
*
* @param result?: {@link Result} optional use specific result set to run interpolation on
@ -536,7 +554,5 @@ export class Human {
}
}
/**
* Class Human is also available as default export
*/
/** Class Human as default export */
export { Human as default };

View File

@ -32,7 +32,7 @@ export function canvas(width, height): HTMLCanvasElement | OffscreenCanvas {
// @ts-ignore // env.canvas is an external monkey-patch
c = (typeof env.Canvas !== 'undefined') ? new env.Canvas(width, height) : null;
}
// if (!c) throw new Error('Human: Cannot create canvas');
// if (!c) throw new Error('cannot create canvas');
return c;
}
@ -41,7 +41,7 @@ export function canvas(width, height): HTMLCanvasElement | OffscreenCanvas {
// input is resized and run through imagefx filter
export function process(input: Input, config: Config): { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement } {
let tensor;
if (!input) throw new Error('Human: Input is missing');
if (!input) throw new Error('input is missing');
// sanity checks since different browsers do not implement all dom elements
if (
!(input instanceof tf.Tensor)
@ -55,12 +55,12 @@ export function process(input: Input, config: Config): { tensor: Tensor | null,
&& !(typeof HTMLCanvasElement !== 'undefined' && input instanceof HTMLCanvasElement)
&& !(typeof OffscreenCanvas !== 'undefined' && input instanceof OffscreenCanvas)
) {
throw new Error('Human: Input type is not recognized');
throw new Error('input type is not recognized');
}
if (input instanceof tf.Tensor) {
// if input is tensor, use as-is
if ((input as unknown as Tensor).shape && (input as unknown as Tensor).shape.length === 4 && (input as unknown as Tensor).shape[0] === 1 && (input as unknown as Tensor).shape[3] === 3) tensor = tf.clone(input);
else throw new Error(`Human: Input tensor shape must be [1, height, width, 3] and instead was ${(input as unknown as Tensor).shape}`);
else throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${(input as unknown as Tensor).shape}`);
} else {
// check if resizing will be needed
if (typeof input['readyState'] !== 'undefined' && input['readyState'] <= 2) {
@ -89,7 +89,7 @@ export function process(input: Input, config: Config): { tensor: Tensor | null,
else if ((config.filter.height || 0) > 0) targetWidth = originalWidth * ((config.filter.height || 0) / originalHeight);
if ((config.filter.height || 0) > 0) targetHeight = config.filter.height;
else if ((config.filter.width || 0) > 0) targetHeight = originalHeight * ((config.filter.width || 0) / originalWidth);
if (!targetWidth || !targetHeight) throw new Error('Human: Input cannot determine dimension');
if (!targetWidth || !targetHeight) throw new Error('input cannot determine dimension');
if (!inCanvas || (inCanvas?.width !== targetWidth) || (inCanvas?.height !== targetHeight)) inCanvas = canvas(targetWidth, targetHeight);
// draw input to our canvas
@ -176,7 +176,11 @@ export function process(input: Input, config: Config): { tensor: Tensor | null,
tempCanvas.height = targetHeight;
const tempCtx = tempCanvas.getContext('2d');
tempCtx?.drawImage(outCanvas, 0, 0);
try {
pixels = (tf.browser && env.browser) ? tf.browser.fromPixels(tempCanvas) : null;
} catch (err) {
throw new Error('browser webgl error');
}
} else { // cpu and wasm kernel does not implement efficient fromPixels method
// we cant use canvas as-is as it already has a context, so we do a silly one more canvas and do fromPixels on ImageData instead
const tempCanvas = canvas(targetWidth, targetHeight);
@ -206,7 +210,7 @@ export function process(input: Input, config: Config): { tensor: Tensor | null,
tf.dispose(casted);
} else {
tensor = tf.zeros([1, targetWidth, targetHeight, 3]);
throw new Error('Human: Cannot create tensor from input');
throw new Error('cannot create tensor from input');
}
}
}

View File

@ -17,7 +17,7 @@ function GLProgram(gl, vertexSource, fragmentSource) {
const shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) throw new Error('Filter: GL compile failed', gl.getShaderInfoLog(shader));
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) throw new Error('filter: gl compile failed', gl.getShaderInfoLog(shader));
return shader;
};
@ -30,7 +30,7 @@ function GLProgram(gl, vertexSource, fragmentSource) {
gl.attachShader(this.id, _fsh);
gl.linkProgram(this.id);
if (!gl.getProgramParameter(this.id, gl.LINK_STATUS)) throw new Error('Filter: GL link failed', gl.getProgramInfoLog(this.id));
if (!gl.getProgramParameter(this.id, gl.LINK_STATUS)) throw new Error('filter: gl link failed', gl.getProgramInfoLog(this.id));
gl.useProgram(this.id);
// Collect attributes
@ -61,7 +61,7 @@ export function GLImageFilter(params) {
const _shaderProgramCache = { };
const DRAW = { INTERMEDIATE: 1 };
const gl = _canvas.getContext('webgl');
if (!gl) throw new Error('Filter: getContext() failed');
if (!gl) throw new Error('filter: context failed');
this.addFilter = function (name) {
// eslint-disable-next-line prefer-rest-params

View File

@ -21,7 +21,7 @@ export async function load(config: Config): Promise<GraphModel> {
model = await tf.loadGraphModel(join(config.modelBasePath, config.object.modelPath || ''));
const inputs = Object.values(model.modelSignature['inputs']);
model.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null;
if (!model.inputSize) throw new Error(`Human: Cannot determine model inputSize: ${config.object.modelPath}`);
if (!model.inputSize) throw new Error(`cannot determine model inputSize: ${config.object.modelPath}`);
if (!model || !model.modelUrl) log('load model failed:', config.object.modelPath);
else if (config.debug) log('load model:', model.modelUrl);
} else if (config.debug) log('cached model:', model.modelUrl);

View File

@ -53,25 +53,22 @@ export async function check(instance) {
if (instance.config.backend === 'wasm') {
if (instance.config.debug) log('wasm path:', instance.config.wasmPath);
if (typeof tf?.setWasmPaths !== 'undefined') await tf.setWasmPaths(instance.config.wasmPath);
else throw new Error('Human: WASM backend is not loaded');
else throw new Error('wasm backend is not loaded');
const simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');
const mt = await tf.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT');
if (instance.config.debug) log(`wasm execution: ${simd ? 'SIMD' : 'no SIMD'} ${mt ? 'multithreaded' : 'singlethreaded'}`);
if (instance.config.debug && !simd) log('warning: wasm simd support is not enabled');
}
await tf.setBackend(instance.config.backend);
try {
await tf.setBackend(instance.config.backend);
await tf.ready();
} catch (err) {
log('error: cannot set backend:', instance.config.backend, err);
return false;
}
}
tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', 0);
// handle webgl & humangl
if (tf.getBackend() === 'humangl') {
tf.ENV.set('CHECK_COMPUTATION_FOR_ERRORS', false);
@ -97,4 +94,5 @@ export async function check(instance) {
env.get(); // update env on backend init
instance.env = env.env;
}
return true;
}

View File

@ -4,10 +4,10 @@
*/
import { log } from '../helpers';
import { env } from '../env';
import * as models from '../models';
import * as tf from '../../dist/tfjs.esm.js';
import * as image from '../image/image';
import * as models from '../models';
// import { env } from '../env';
export const config = {
name: 'humangl',
@ -47,10 +47,12 @@ export async function register(instance): Promise<void> {
// force backend reload if gl context is not valid
if ((config.name in tf.engine().registry) && (!config.gl || !config.gl.getParameter(config.gl.VERSION))) {
log('error: humangl backend invalid context');
log('resetting humangl backend');
models.reset(instance);
/*
log('resetting humangl backend');
await tf.removeBackend(config.name);
await register(instance); // re-register
*/
}
if (!tf.findBackend(config.name)) {
try {
@ -63,14 +65,18 @@ export async function register(instance): Promise<void> {
config.gl = config.canvas?.getContext('webgl2', config.webGLattr) as WebGL2RenderingContext;
if (config.canvas) {
config.canvas.addEventListener('webglcontextlost', async (e) => {
const err = config.gl?.getError();
log('error: humangl context lost:', err, e);
log('gpu memory usage:', instance.tf.engine().backendInstance.numBytesInGPU);
log('error: humangl:', e.type);
// log('gpu memory usage:', instance.tf.engine().backendInstance.numBytesInGPU);
log('possible browser memory leak using webgl');
instance.emit('error');
throw new Error('browser webgl error');
/*
log('resetting humangl backend');
env.initial = true;
models.reset(instance);
await tf.removeBackend(config.name);
// await register(instance); // re-register
await register(instance); // re-register
*/
});
config.canvas.addEventListener('webglcontextrestored', (e) => {
log('error: humangl context restored:', e);

File diff suppressed because it is too large Load Diff

View File

@ -1,291 +1,291 @@
2021-09-16 10:47:21 INFO:  @vladmandic/human version 2.2.1
2021-09-16 10:47:21 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.5.0
2021-09-16 10:47:21 INFO:  tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
2021-09-16 10:47:21 INFO: 
2021-09-16 10:47:21 INFO:  test-node.js start
2021-09-16 10:47:22 STATE: test-node.js passed: create human
2021-09-16 10:47:22 INFO:  test-node.js human version: 2.2.1
2021-09-16 10:47:22 INFO:  test-node.js platform: linux x64 agent: NodeJS v16.5.0
2021-09-16 10:47:22 INFO:  test-node.js tfjs version: 3.9.0
2021-09-16 10:47:22 STATE: test-node.js passed: set backend: tensorflow
2021-09-16 10:47:22 STATE: test-node.js tensors 573
2021-09-16 10:47:22 STATE: test-node.js passed: load models
2021-09-16 10:47:22 STATE: test-node.js result: defined models: 14 loaded models: 3
2021-09-16 10:47:22 STATE: test-node.js passed: warmup: none default
2021-09-16 10:47:22 STATE: test-node.js event: image
2021-09-16 10:47:23 STATE: test-node.js event: detect
2021-09-16 10:47:23 STATE: test-node.js event: warmup
2021-09-16 10:47:23 STATE: test-node.js passed: warmup: face default
2021-09-16 10:47:23 DATA:  test-node.js result: face: 1 body: 0 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":23.6,"gender":"female"} {} {}
2021-09-16 10:47:23 DATA:  test-node.js result: performance: load: 164 total: 515
2021-09-16 10:47:23 STATE: test-node.js event: image
2021-09-16 10:47:23 STATE: test-node.js event: detect
2021-09-16 10:47:23 STATE: test-node.js event: warmup
2021-09-16 10:47:23 STATE: test-node.js passed: warmup: body default
2021-09-16 10:47:23 DATA:  test-node.js result: face: 1 body: 0 hand: 0 gesture: 2 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {}
2021-09-16 10:47:23 DATA:  test-node.js result: performance: load: 164 total: 323
2021-09-16 10:47:23 INFO:  test-node.js test default
2021-09-16 10:47:24 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:47:24 STATE: test-node.js event: image
2021-09-16 10:47:24 STATE: test-node.js event: detect
2021-09-16 10:47:24 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-09-16 10:47:24 DATA:  test-node.js result: face: 1 body: 0 hand: 0 gesture: 2 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {}
2021-09-16 10:47:24 DATA:  test-node.js result: performance: load: 164 total: 266
2021-09-16 10:47:24 INFO:  test-node.js test body variants
2021-09-16 10:47:25 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:47:25 STATE: test-node.js event: image
2021-09-16 10:47:25 STATE: test-node.js event: detect
2021-09-16 10:47:25 STATE: test-node.js passed: detect: samples/ai-body.jpg posenet
2021-09-16 10:47:25 DATA:  test-node.js result: face: 1 body: 0 hand: 0 gesture: 2 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {}
2021-09-16 10:47:25 DATA:  test-node.js result: performance: load: 164 total: 150
2021-09-16 10:47:26 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:47:26 STATE: test-node.js event: image
2021-09-16 10:47:26 STATE: test-node.js event: detect
2021-09-16 10:47:26 STATE: test-node.js passed: detect: samples/ai-body.jpg movenet
2021-09-16 10:47:26 DATA:  test-node.js result: face: 1 body: 0 hand: 0 gesture: 2 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {}
2021-09-16 10:47:26 DATA:  test-node.js result: performance: load: 164 total: 160
2021-09-16 10:47:26 STATE: test-node.js event: image
2021-09-16 10:47:26 STATE: test-node.js event: detect
2021-09-16 10:47:26 STATE: test-node.js passed: detect: random default
2021-09-16 10:47:26 DATA:  test-node.js result: face: 0 body: 0 hand: 0 gesture: 0 object: 0 person: 0 {} {} {}
2021-09-16 10:47:26 DATA:  test-node.js result: performance: load: 164 total: 61
2021-09-16 10:47:26 INFO:  test-node.js test: first instance
2021-09-16 10:47:26 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-16 10:47:26 STATE: test-node.js event: image
2021-09-16 10:47:27 STATE: test-node.js event: detect
2021-09-16 10:47:27 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-09-16 10:47:27 DATA:  test-node.js result: face: 0 body: 0 hand: 0 gesture: 0 object: 0 person: 0 {} {} {}
2021-09-16 10:47:27 DATA:  test-node.js result: performance: load: 164 total: 0
2021-09-16 10:47:27 INFO:  test-node.js test: second instance
2021-09-16 10:47:27 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-16 10:47:27 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-09-16 10:47:27 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {"score":0.69,"keypoints":10}
2021-09-16 10:47:27 DATA:  test-node.js result: performance: load: 95 total: 316
2021-09-16 10:47:27 INFO:  test-node.js test: concurrent
2021-09-16 10:47:27 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-09-16 10:47:27 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-09-16 10:47:28 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:47:29 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:47:29 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-16 10:47:29 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-16 10:47:29 STATE: test-node.js event: image
2021-09-16 10:47:29 STATE: test-node.js event: image
2021-09-16 10:47:29 STATE: test-node.js event: image
2021-09-16 10:47:30 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-09-16 10:47:30 DATA:  test-node.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":0.69,"keypoints":10}
2021-09-16 10:47:30 DATA:  test-node.js result: performance: load: 95 total: 553
2021-09-16 10:47:30 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-09-16 10:47:30 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {"score":0.69,"keypoints":10}
2021-09-16 10:47:30 DATA:  test-node.js result: performance: load: 95 total: 639
2021-09-16 10:47:30 STATE: test-node.js passed: detect: samples/ai-face.jpg default
2021-09-16 10:47:30 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 0 person: 1 {"score":0.42,"age":29.5,"gender":"female"} {} {"score":0.47,"keypoints":4}
2021-09-16 10:47:30 DATA:  test-node.js result: performance: load: 95 total: 779
2021-09-16 10:47:31 STATE: test-node.js event: detect
2021-09-16 10:47:31 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-09-16 10:47:31 DATA:  test-node.js result: face: 0 body: 0 hand: 0 gesture: 0 object: 0 person: 0 {} {} {}
2021-09-16 10:47:31 DATA:  test-node.js result: performance: load: 164 total: 1130
2021-09-16 10:47:31 STATE: test-node.js event: detect
2021-09-16 10:47:31 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-09-16 10:47:31 DATA:  test-node.js result: face: 1 body: 0 hand: 0 gesture: 2 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {}
2021-09-16 10:47:31 DATA:  test-node.js result: performance: load: 164 total: 1355
2021-09-16 10:47:31 STATE: test-node.js event: detect
2021-09-16 10:47:31 STATE: test-node.js passed: detect: samples/ai-face.jpg default
2021-09-16 10:47:31 DATA:  test-node.js result: face: 2 body: 0 hand: 0 gesture: 9 object: 0 person: 2 {"score":1,"age":23.6,"gender":"female"} {} {}
2021-09-16 10:47:31 DATA:  test-node.js result: performance: load: 164 total: 1441
2021-09-16 10:47:31 STATE: test-node.js passeed: no memory leak
2021-09-16 10:47:31 INFO:  test-node.js events: {"image":10,"detect":10,"warmup":2}
2021-09-16 10:47:31 INFO:  test-node.js test complete: 9300 ms
2021-09-16 10:47:31 INFO: 
2021-09-16 10:47:31 INFO:  test-node-gpu.js start
2021-09-16 10:47:32 WARN:  test-node-gpu.js stderr: 2021-09-16 10:47:32.032467: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2021-09-16 10:47:32 WARN:  test-node-gpu.js stderr: 2021-09-16 10:47:32.154267: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2021-09-16 10:47:32 WARN:  test-node-gpu.js stderr: 2021-09-16 10:47:32.154311: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
2021-09-16 10:47:32 STATE: test-node-gpu.js passed: create human
2021-09-16 10:47:32 INFO:  test-node-gpu.js human version: 2.2.1
2021-09-16 10:47:32 INFO:  test-node-gpu.js platform: linux x64 agent: NodeJS v16.5.0
2021-09-16 10:47:32 INFO:  test-node-gpu.js tfjs version: 3.9.0
2021-09-16 10:47:32 STATE: test-node-gpu.js passed: set backend: tensorflow
2021-09-16 10:47:32 STATE: test-node-gpu.js tensors 1456
2021-09-16 10:47:32 STATE: test-node-gpu.js passed: load models
2021-09-16 10:47:32 STATE: test-node-gpu.js result: defined models: 14 loaded models: 7
2021-09-16 10:47:32 STATE: test-node-gpu.js passed: warmup: none default
2021-09-16 10:47:32 STATE: test-node-gpu.js event: image
2021-09-16 10:47:33 STATE: test-node-gpu.js event: detect
2021-09-16 10:47:33 STATE: test-node-gpu.js event: warmup
2021-09-16 10:47:33 STATE: test-node-gpu.js passed: warmup: face default
2021-09-16 10:47:33 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4}
2021-09-16 10:47:33 DATA:  test-node-gpu.js result: performance: load: 298 total: 1079
2021-09-16 10:47:33 STATE: test-node-gpu.js event: image
2021-09-16 10:47:34 STATE: test-node-gpu.js event: detect
2021-09-16 10:47:34 STATE: test-node-gpu.js event: warmup
2021-09-16 10:47:34 STATE: test-node-gpu.js passed: warmup: body default
2021-09-16 10:47:34 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2021-09-16 10:47:34 DATA:  test-node-gpu.js result: performance: load: 298 total: 1053
2021-09-16 10:47:34 INFO:  test-node-gpu.js test default
2021-09-16 10:47:35 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:47:35 STATE: test-node-gpu.js event: image
2021-09-16 10:47:36 STATE: test-node-gpu.js event: detect
2021-09-16 10:47:36 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
2021-09-16 10:47:36 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2021-09-16 10:47:36 DATA:  test-node-gpu.js result: performance: load: 298 total: 575
2021-09-16 10:47:36 INFO:  test-node-gpu.js test body variants
2021-09-16 10:47:37 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:47:37 STATE: test-node-gpu.js event: image
2021-09-16 10:47:37 STATE: test-node-gpu.js event: detect
2021-09-16 10:47:37 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg posenet
2021-09-16 10:47:37 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-09-16 10:47:37 DATA:  test-node-gpu.js result: performance: load: 298 total: 272
2021-09-16 10:47:38 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:47:38 STATE: test-node-gpu.js event: image
2021-09-16 10:47:38 STATE: test-node-gpu.js event: detect
2021-09-16 10:47:38 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg movenet
2021-09-16 10:47:38 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2021-09-16 10:47:38 DATA:  test-node-gpu.js result: performance: load: 298 total: 201
2021-09-16 10:47:38 STATE: test-node-gpu.js event: image
2021-09-16 10:47:39 STATE: test-node-gpu.js event: detect
2021-09-16 10:47:39 STATE: test-node-gpu.js passed: detect: random default
2021-09-16 10:47:39 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
2021-09-16 10:47:39 DATA:  test-node-gpu.js result: performance: load: 298 total: 562
2021-09-16 10:47:39 INFO:  test-node-gpu.js test: first instance
2021-09-16 10:47:39 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-16 10:47:39 STATE: test-node-gpu.js event: image
2021-09-16 10:47:40 STATE: test-node-gpu.js event: detect
2021-09-16 10:47:40 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-09-16 10:47:40 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
2021-09-16 10:47:40 DATA:  test-node-gpu.js result: performance: load: 298 total: 811
2021-09-16 10:47:40 INFO:  test-node-gpu.js test: second instance
2021-09-16 10:47:40 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-16 10:47:40 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-09-16 10:47:40 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
2021-09-16 10:47:40 DATA:  test-node-gpu.js result: performance: load: 4 total: 368
2021-09-16 10:47:40 INFO:  test-node-gpu.js test: concurrent
2021-09-16 10:47:40 STATE: test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-09-16 10:47:40 STATE: test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-09-16 10:47:41 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:47:42 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:47:43 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-16 10:47:43 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-16 10:47:43 STATE: test-node-gpu.js event: image
2021-09-16 10:47:43 STATE: test-node-gpu.js event: image
2021-09-16 10:47:43 STATE: test-node-gpu.js event: image
2021-09-16 10:47:45 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
2021-09-16 10:47:45 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
2021-09-16 10:47:45 DATA:  test-node-gpu.js result: performance: load: 4 total: 1930
2021-09-16 10:47:45 STATE: test-node-gpu.js passed: detect: samples/ai-face.jpg default
2021-09-16 10:47:45 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"score":0.42,"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
2021-09-16 10:47:45 DATA:  test-node-gpu.js result: performance: load: 4 total: 1931
2021-09-16 10:47:45 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-09-16 10:47:45 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
2021-09-16 10:47:45 DATA:  test-node-gpu.js result: performance: load: 4 total: 1931
2021-09-16 10:47:46 STATE: test-node-gpu.js event: detect
2021-09-16 10:47:46 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
2021-09-16 10:47:46 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":10}
2021-09-16 10:47:46 DATA:  test-node-gpu.js result: performance: load: 298 total: 2664
2021-09-16 10:47:46 STATE: test-node-gpu.js event: detect
2021-09-16 10:47:46 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-09-16 10:47:46 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
2021-09-16 10:47:46 DATA:  test-node-gpu.js result: performance: load: 298 total: 2818
2021-09-16 10:47:46 STATE: test-node-gpu.js event: detect
2021-09-16 10:47:46 STATE: test-node-gpu.js passed: detect: samples/ai-face.jpg default
2021-09-16 10:47:46 DATA:  test-node-gpu.js result: face: 2 body: 1 hand: 0 gesture: 10 object: 1 person: 2 {"score":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":10}
2021-09-16 10:47:46 DATA:  test-node-gpu.js result: performance: load: 298 total: 2899
2021-09-16 10:47:46 STATE: test-node-gpu.js passeed: no memory leak
2021-09-16 10:47:46 INFO:  test-node-gpu.js events: {"image":10,"detect":10,"warmup":2}
2021-09-16 10:47:46 INFO:  test-node-gpu.js test complete: 13989 ms
2021-09-16 10:47:46 INFO: 
2021-09-16 10:47:46 INFO:  test-node-wasm.js start
2021-09-16 10:47:48 STATE: test-node-wasm.js passed: model server: https://vladmandic.github.io/human/models/
2021-09-16 10:47:48 STATE: test-node-wasm.js passed: create human
2021-09-16 10:47:48 INFO:  test-node-wasm.js human version: 2.2.1
2021-09-16 10:47:48 INFO:  test-node-wasm.js platform: linux x64 agent: NodeJS v16.5.0
2021-09-16 10:47:48 INFO:  test-node-wasm.js tfjs version: 3.9.0
2021-09-16 10:47:51 STATE: test-node-wasm.js passed: set backend: wasm
2021-09-16 10:47:51 STATE: test-node-wasm.js tensors 1189
2021-09-16 10:47:51 STATE: test-node-wasm.js passed: load models
2021-09-16 10:47:51 STATE: test-node-wasm.js result: defined models: 14 loaded models: 6
2021-09-16 10:47:51 STATE: test-node-wasm.js passed: warmup: none default
2021-09-16 10:47:52 STATE: test-node-wasm.js event: image
2021-09-16 10:47:52 STATE: test-node-wasm.js event: detect
2021-09-16 10:47:52 STATE: test-node-wasm.js event: warmup
2021-09-16 10:47:52 STATE: test-node-wasm.js passed: warmup: face default
2021-09-16 10:47:52 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":23.6,"gender":"female"} {} {"score":0.47,"keypoints":4}
2021-09-16 10:47:52 DATA:  test-node-wasm.js result: performance: load: 3568 total: 1057
2021-09-16 10:47:56 STATE: test-node-wasm.js event: image
2021-09-16 10:47:57 STATE: test-node-wasm.js event: detect
2021-09-16 10:47:57 STATE: test-node-wasm.js event: warmup
2021-09-16 10:47:57 STATE: test-node-wasm.js passed: warmup: body default
2021-09-16 10:47:57 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-16 10:47:57 DATA:  test-node-wasm.js result: performance: load: 3568 total: 2712
2021-09-16 10:47:57 INFO:  test-node-wasm.js test default
2021-09-16 10:47:59 STATE: test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:48:00 STATE: test-node-wasm.js event: image
2021-09-16 10:48:01 STATE: test-node-wasm.js event: detect
2021-09-16 10:48:01 STATE: test-node-wasm.js passed: detect: samples/ai-body.jpg default
2021-09-16 10:48:01 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-16 10:48:01 DATA:  test-node-wasm.js result: performance: load: 3568 total: 2354
2021-09-16 10:48:01 INFO:  test-node-wasm.js test body variants
2021-09-16 10:48:03 STATE: test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:48:05 STATE: test-node-wasm.js event: image
2021-09-16 10:48:05 STATE: test-node-wasm.js event: detect
2021-09-16 10:48:05 STATE: test-node-wasm.js passed: detect: samples/ai-body.jpg posenet
2021-09-16 10:48:05 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.96,"keypoints":16}
2021-09-16 10:48:05 DATA:  test-node-wasm.js result: performance: load: 3568 total: 1932
2021-09-16 10:48:07 STATE: test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:48:08 STATE: test-node-wasm.js event: image
2021-09-16 10:48:09 STATE: test-node-wasm.js event: detect
2021-09-16 10:48:09 STATE: test-node-wasm.js passed: detect: samples/ai-body.jpg movenet
2021-09-16 10:48:09 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-16 10:48:09 DATA:  test-node-wasm.js result: performance: load: 3568 total: 1887
2021-09-16 10:48:10 STATE: test-node-wasm.js event: image
2021-09-16 10:48:10 STATE: test-node-wasm.js event: detect
2021-09-16 10:48:10 STATE: test-node-wasm.js passed: detect: random default
2021-09-16 10:48:10 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-16 10:48:10 DATA:  test-node-wasm.js result: performance: load: 3568 total: 1724
2021-09-16 10:48:10 INFO:  test-node-wasm.js test: first instance
2021-09-16 10:48:11 STATE: test-node-wasm.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-16 10:48:13 STATE: test-node-wasm.js event: image
2021-09-16 10:48:13 STATE: test-node-wasm.js event: detect
2021-09-16 10:48:13 STATE: test-node-wasm.js passed: detect: samples/ai-upper.jpg default
2021-09-16 10:48:13 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-16 10:48:13 DATA:  test-node-wasm.js result: performance: load: 3568 total: 1787
2021-09-16 10:48:13 INFO:  test-node-wasm.js test: second instance
2021-09-16 10:48:13 STATE: test-node-wasm.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-16 10:48:16 STATE: test-node-wasm.js passed: detect: samples/ai-upper.jpg default
2021-09-16 10:48:16 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-16 10:48:16 DATA:  test-node-wasm.js result: performance: load: 6 total: 2381
2021-09-16 10:48:16 INFO:  test-node-wasm.js test: concurrent
2021-09-16 10:48:16 STATE: test-node-wasm.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-09-16 10:48:16 STATE: test-node-wasm.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-09-16 10:48:17 STATE: test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:48:19 STATE: test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-16 10:48:20 STATE: test-node-wasm.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-16 10:48:20 STATE: test-node-wasm.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-16 10:48:22 STATE: test-node-wasm.js event: image
2021-09-16 10:48:25 STATE: test-node-wasm.js event: image
2021-09-16 10:48:28 STATE: test-node-wasm.js event: image
2021-09-16 10:48:31 STATE: test-node-wasm.js passed: detect: samples/ai-face.jpg default
2021-09-16 10:48:31 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-16 10:48:31 DATA:  test-node-wasm.js result: performance: load: 6 total: 10592
2021-09-16 10:48:31 STATE: test-node-wasm.js passed: detect: samples/ai-body.jpg default
2021-09-16 10:48:31 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-16 10:48:31 DATA:  test-node-wasm.js result: performance: load: 6 total: 10592
2021-09-16 10:48:31 STATE: test-node-wasm.js event: detect
2021-09-16 10:48:31 STATE: test-node-wasm.js event: detect
2021-09-16 10:48:31 STATE: test-node-wasm.js event: detect
2021-09-16 10:48:31 STATE: test-node-wasm.js passed: detect: samples/ai-face.jpg default
2021-09-16 10:48:31 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-16 10:48:31 DATA:  test-node-wasm.js result: performance: load: 3568 total: 10595
2021-09-16 10:48:31 STATE: test-node-wasm.js passed: detect: samples/ai-body.jpg default
2021-09-16 10:48:31 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-16 10:48:31 DATA:  test-node-wasm.js result: performance: load: 3568 total: 10595
2021-09-16 10:48:31 STATE: test-node-wasm.js passed: detect: samples/ai-upper.jpg default
2021-09-16 10:48:31 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-16 10:48:31 DATA:  test-node-wasm.js result: performance: load: 3568 total: 10595
2021-09-16 10:48:31 STATE: test-node-wasm.js passed: detect: samples/ai-upper.jpg default
2021-09-16 10:48:31 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-16 10:48:31 DATA:  test-node-wasm.js result: performance: load: 6 total: 10596
2021-09-16 10:48:31 STATE: test-node-wasm.js passeed: no memory leak
2021-09-16 10:48:31 INFO:  test-node-wasm.js events: {"image":10,"detect":10,"warmup":2}
2021-09-16 10:48:31 INFO:  test-node-wasm.js test complete: 43047 ms
2021-09-16 10:48:31 INFO: 
2021-09-16 10:48:31 INFO:  status: {"passed":88,"failed":0}
2021-09-17 14:06:19 INFO:  @vladmandic/human version 2.2.1
2021-09-17 14:06:19 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.5.0
2021-09-17 14:06:19 INFO:  tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
2021-09-17 14:06:19 INFO: 
2021-09-17 14:06:19 INFO:  test-node.js start
2021-09-17 14:06:20 STATE: test-node.js passed: create human
2021-09-17 14:06:20 INFO:  test-node.js human version: 2.2.1
2021-09-17 14:06:20 INFO:  test-node.js platform: linux x64 agent: NodeJS v16.5.0
2021-09-17 14:06:20 INFO:  test-node.js tfjs version: 3.9.0
2021-09-17 14:06:21 STATE: test-node.js passed: set backend: tensorflow
2021-09-17 14:06:21 STATE: test-node.js tensors 573
2021-09-17 14:06:21 STATE: test-node.js passed: load models
2021-09-17 14:06:21 STATE: test-node.js result: defined models: 14 loaded models: 3
2021-09-17 14:06:21 STATE: test-node.js passed: warmup: none default
2021-09-17 14:06:21 STATE: test-node.js event: image
2021-09-17 14:06:21 STATE: test-node.js event: detect
2021-09-17 14:06:21 STATE: test-node.js event: warmup
2021-09-17 14:06:21 STATE: test-node.js passed: warmup: face default
2021-09-17 14:06:21 DATA:  test-node.js result: face: 1 body: 0 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":23.6,"gender":"female"} {} {}
2021-09-17 14:06:21 DATA:  test-node.js result: performance: load: 118 total: 611
2021-09-17 14:06:21 STATE: test-node.js event: image
2021-09-17 14:06:22 STATE: test-node.js event: detect
2021-09-17 14:06:22 STATE: test-node.js event: warmup
2021-09-17 14:06:22 STATE: test-node.js passed: warmup: body default
2021-09-17 14:06:22 DATA:  test-node.js result: face: 1 body: 0 hand: 0 gesture: 2 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {}
2021-09-17 14:06:22 DATA:  test-node.js result: performance: load: 118 total: 320
2021-09-17 14:06:22 INFO:  test-node.js test default
2021-09-17 14:06:22 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:06:22 STATE: test-node.js event: image
2021-09-17 14:06:23 STATE: test-node.js event: detect
2021-09-17 14:06:23 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-09-17 14:06:23 DATA:  test-node.js result: face: 1 body: 0 hand: 0 gesture: 2 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {}
2021-09-17 14:06:23 DATA:  test-node.js result: performance: load: 118 total: 263
2021-09-17 14:06:23 INFO:  test-node.js test body variants
2021-09-17 14:06:23 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:06:23 STATE: test-node.js event: image
2021-09-17 14:06:24 STATE: test-node.js event: detect
2021-09-17 14:06:24 STATE: test-node.js passed: detect: samples/ai-body.jpg posenet
2021-09-17 14:06:24 DATA:  test-node.js result: face: 1 body: 0 hand: 0 gesture: 2 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {}
2021-09-17 14:06:24 DATA:  test-node.js result: performance: load: 118 total: 159
2021-09-17 14:06:24 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:06:24 STATE: test-node.js event: image
2021-09-17 14:06:24 STATE: test-node.js event: detect
2021-09-17 14:06:24 STATE: test-node.js passed: detect: samples/ai-body.jpg movenet
2021-09-17 14:06:24 DATA:  test-node.js result: face: 1 body: 0 hand: 0 gesture: 2 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {}
2021-09-17 14:06:24 DATA:  test-node.js result: performance: load: 118 total: 142
2021-09-17 14:06:25 STATE: test-node.js event: image
2021-09-17 14:06:25 STATE: test-node.js event: detect
2021-09-17 14:06:25 STATE: test-node.js passed: detect: random default
2021-09-17 14:06:25 DATA:  test-node.js result: face: 0 body: 0 hand: 0 gesture: 0 object: 0 person: 0 {} {} {}
2021-09-17 14:06:25 DATA:  test-node.js result: performance: load: 118 total: 85
2021-09-17 14:06:25 INFO:  test-node.js test: first instance
2021-09-17 14:06:25 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-17 14:06:25 STATE: test-node.js event: image
2021-09-17 14:06:25 STATE: test-node.js event: detect
2021-09-17 14:06:25 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-09-17 14:06:25 DATA:  test-node.js result: face: 1 body: 0 hand: 0 gesture: 2 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {}
2021-09-17 14:06:25 DATA:  test-node.js result: performance: load: 118 total: 301
2021-09-17 14:06:25 INFO:  test-node.js test: second instance
2021-09-17 14:06:26 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-17 14:06:26 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-09-17 14:06:26 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {"score":0.69,"keypoints":10}
2021-09-17 14:06:26 DATA:  test-node.js result: performance: load: 39 total: 322
2021-09-17 14:06:26 INFO:  test-node.js test: concurrent
2021-09-17 14:06:26 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-09-17 14:06:26 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-09-17 14:06:27 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:06:28 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:06:28 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-17 14:06:28 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-17 14:06:28 STATE: test-node.js event: image
2021-09-17 14:06:28 STATE: test-node.js event: image
2021-09-17 14:06:28 STATE: test-node.js event: image
2021-09-17 14:06:29 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-09-17 14:06:29 DATA:  test-node.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":0.69,"keypoints":10}
2021-09-17 14:06:29 DATA:  test-node.js result: performance: load: 39 total: 518
2021-09-17 14:06:29 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-09-17 14:06:29 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {"score":0.69,"keypoints":10}
2021-09-17 14:06:29 DATA:  test-node.js result: performance: load: 39 total: 600
2021-09-17 14:06:29 STATE: test-node.js passed: detect: samples/ai-face.jpg default
2021-09-17 14:06:29 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 0 person: 1 {"score":0.42,"age":29.5,"gender":"female"} {} {"score":0.47,"keypoints":4}
2021-09-17 14:06:29 DATA:  test-node.js result: performance: load: 39 total: 726
2021-09-17 14:06:29 STATE: test-node.js event: detect
2021-09-17 14:06:29 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-09-17 14:06:29 DATA:  test-node.js result: face: 0 body: 0 hand: 0 gesture: 0 object: 0 person: 0 {} {} {}
2021-09-17 14:06:29 DATA:  test-node.js result: performance: load: 118 total: 1067
2021-09-17 14:06:29 STATE: test-node.js event: detect
2021-09-17 14:06:29 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-09-17 14:06:29 DATA:  test-node.js result: face: 1 body: 0 hand: 0 gesture: 2 object: 0 person: 1 {"score":1,"age":29.5,"gender":"female"} {} {}
2021-09-17 14:06:29 DATA:  test-node.js result: performance: load: 118 total: 1285
2021-09-17 14:06:29 STATE: test-node.js event: detect
2021-09-17 14:06:29 STATE: test-node.js passed: detect: samples/ai-face.jpg default
2021-09-17 14:06:29 DATA:  test-node.js result: face: 2 body: 0 hand: 0 gesture: 9 object: 0 person: 2 {"score":1,"age":23.6,"gender":"female"} {} {}
2021-09-17 14:06:29 DATA:  test-node.js result: performance: load: 118 total: 1365
2021-09-17 14:06:29 STATE: test-node.js passed: no memory leak
2021-09-17 14:06:29 INFO:  test-node.js events: {"image":10,"detect":10,"warmup":2}
2021-09-17 14:06:29 INFO:  test-node.js test complete: 9086 ms
2021-09-17 14:06:30 INFO: 
2021-09-17 14:06:30 INFO:  test-node-gpu.js start
2021-09-17 14:06:31 WARN:  test-node-gpu.js stderr: 2021-09-17 14:06:31.024124: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2021-09-17 14:06:31 WARN:  test-node-gpu.js stderr: 2021-09-17 14:06:31.304327: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2021-09-17 14:06:31 WARN:  test-node-gpu.js stderr: 2021-09-17 14:06:31.304481: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
2021-09-17 14:06:31 STATE: test-node-gpu.js passed: create human
2021-09-17 14:06:31 INFO:  test-node-gpu.js human version: 2.2.1
2021-09-17 14:06:31 INFO:  test-node-gpu.js platform: linux x64 agent: NodeJS v16.5.0
2021-09-17 14:06:31 INFO:  test-node-gpu.js tfjs version: 3.9.0
2021-09-17 14:06:31 STATE: test-node-gpu.js passed: set backend: tensorflow
2021-09-17 14:06:31 STATE: test-node-gpu.js tensors 1456
2021-09-17 14:06:31 STATE: test-node-gpu.js passed: load models
2021-09-17 14:06:31 STATE: test-node-gpu.js result: defined models: 14 loaded models: 7
2021-09-17 14:06:31 STATE: test-node-gpu.js passed: warmup: none default
2021-09-17 14:06:31 STATE: test-node-gpu.js event: image
2021-09-17 14:06:33 STATE: test-node-gpu.js event: detect
2021-09-17 14:06:33 STATE: test-node-gpu.js event: warmup
2021-09-17 14:06:33 STATE: test-node-gpu.js passed: warmup: face default
2021-09-17 14:06:33 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4}
2021-09-17 14:06:33 DATA:  test-node-gpu.js result: performance: load: 289 total: 1296
2021-09-17 14:06:33 STATE: test-node-gpu.js event: image
2021-09-17 14:06:34 STATE: test-node-gpu.js event: detect
2021-09-17 14:06:34 STATE: test-node-gpu.js event: warmup
2021-09-17 14:06:34 STATE: test-node-gpu.js passed: warmup: body default
2021-09-17 14:06:34 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2021-09-17 14:06:34 DATA:  test-node-gpu.js result: performance: load: 289 total: 1108
2021-09-17 14:06:34 INFO:  test-node-gpu.js test default
2021-09-17 14:06:35 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:06:35 STATE: test-node-gpu.js event: image
2021-09-17 14:06:35 STATE: test-node-gpu.js event: detect
2021-09-17 14:06:35 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
2021-09-17 14:06:35 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2021-09-17 14:06:35 DATA:  test-node-gpu.js result: performance: load: 289 total: 596
2021-09-17 14:06:35 INFO:  test-node-gpu.js test body variants
2021-09-17 14:06:36 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:06:36 STATE: test-node-gpu.js event: image
2021-09-17 14:06:36 STATE: test-node-gpu.js event: detect
2021-09-17 14:06:36 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg posenet
2021-09-17 14:06:36 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-09-17 14:06:36 DATA:  test-node-gpu.js result: performance: load: 289 total: 253
2021-09-17 14:06:37 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:06:37 STATE: test-node-gpu.js event: image
2021-09-17 14:06:37 STATE: test-node-gpu.js event: detect
2021-09-17 14:06:37 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg movenet
2021-09-17 14:06:37 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2021-09-17 14:06:37 DATA:  test-node-gpu.js result: performance: load: 289 total: 195
2021-09-17 14:06:37 STATE: test-node-gpu.js event: image
2021-09-17 14:06:38 STATE: test-node-gpu.js event: detect
2021-09-17 14:06:38 STATE: test-node-gpu.js passed: detect: random default
2021-09-17 14:06:38 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
2021-09-17 14:06:38 DATA:  test-node-gpu.js result: performance: load: 289 total: 560
2021-09-17 14:06:38 INFO:  test-node-gpu.js test: first instance
2021-09-17 14:06:38 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-17 14:06:38 STATE: test-node-gpu.js event: image
2021-09-17 14:06:39 STATE: test-node-gpu.js event: detect
2021-09-17 14:06:39 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-09-17 14:06:39 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
2021-09-17 14:06:39 DATA:  test-node-gpu.js result: performance: load: 289 total: 805
2021-09-17 14:06:39 INFO:  test-node-gpu.js test: second instance
2021-09-17 14:06:39 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-17 14:06:40 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-09-17 14:06:40 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
2021-09-17 14:06:40 DATA:  test-node-gpu.js result: performance: load: 2 total: 344
2021-09-17 14:06:40 INFO:  test-node-gpu.js test: concurrent
2021-09-17 14:06:40 STATE: test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-09-17 14:06:40 STATE: test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-09-17 14:06:41 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:06:41 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:06:42 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-17 14:06:42 STATE: test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-17 14:06:42 STATE: test-node-gpu.js event: image
2021-09-17 14:06:42 STATE: test-node-gpu.js event: image
2021-09-17 14:06:42 STATE: test-node-gpu.js event: image
2021-09-17 14:06:44 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
2021-09-17 14:06:44 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
2021-09-17 14:06:44 DATA:  test-node-gpu.js result: performance: load: 2 total: 1899
2021-09-17 14:06:44 STATE: test-node-gpu.js passed: detect: samples/ai-face.jpg default
2021-09-17 14:06:44 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"score":0.42,"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
2021-09-17 14:06:44 DATA:  test-node-gpu.js result: performance: load: 2 total: 1908
2021-09-17 14:06:44 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-09-17 14:06:44 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
2021-09-17 14:06:44 DATA:  test-node-gpu.js result: performance: load: 2 total: 1908
2021-09-17 14:06:45 STATE: test-node-gpu.js event: detect
2021-09-17 14:06:45 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg default
2021-09-17 14:06:45 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":10}
2021-09-17 14:06:45 DATA:  test-node-gpu.js result: performance: load: 289 total: 2616
2021-09-17 14:06:45 STATE: test-node-gpu.js event: detect
2021-09-17 14:06:45 STATE: test-node-gpu.js passed: detect: samples/ai-upper.jpg default
2021-09-17 14:06:45 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"score":1,"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
2021-09-17 14:06:45 DATA:  test-node-gpu.js result: performance: load: 289 total: 2791
2021-09-17 14:06:45 STATE: test-node-gpu.js event: detect
2021-09-17 14:06:45 STATE: test-node-gpu.js passed: detect: samples/ai-face.jpg default
2021-09-17 14:06:45 DATA:  test-node-gpu.js result: face: 2 body: 1 hand: 0 gesture: 10 object: 1 person: 2 {"score":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":10}
2021-09-17 14:06:45 DATA:  test-node-gpu.js result: performance: load: 289 total: 2866
2021-09-17 14:06:45 STATE: test-node-gpu.js passed: no memory leak
2021-09-17 14:06:45 INFO:  test-node-gpu.js events: {"image":10,"detect":10,"warmup":2}
2021-09-17 14:06:45 INFO:  test-node-gpu.js test complete: 14069 ms
2021-09-17 14:06:45 INFO: 
2021-09-17 14:06:45 INFO:  test-node-wasm.js start
2021-09-17 14:06:46 STATE: test-node-wasm.js passed: model server: https://vladmandic.github.io/human/models/
2021-09-17 14:06:46 STATE: test-node-wasm.js passed: create human
2021-09-17 14:06:46 INFO:  test-node-wasm.js human version: 2.2.1
2021-09-17 14:06:46 INFO:  test-node-wasm.js platform: linux x64 agent: NodeJS v16.5.0
2021-09-17 14:06:46 INFO:  test-node-wasm.js tfjs version: 3.9.0
2021-09-17 14:06:50 STATE: test-node-wasm.js passed: set backend: wasm
2021-09-17 14:06:50 STATE: test-node-wasm.js tensors 1189
2021-09-17 14:06:50 STATE: test-node-wasm.js passed: load models
2021-09-17 14:06:50 STATE: test-node-wasm.js result: defined models: 14 loaded models: 6
2021-09-17 14:06:50 STATE: test-node-wasm.js passed: warmup: none default
2021-09-17 14:06:50 STATE: test-node-wasm.js event: image
2021-09-17 14:06:51 STATE: test-node-wasm.js event: detect
2021-09-17 14:06:51 STATE: test-node-wasm.js event: warmup
2021-09-17 14:06:51 STATE: test-node-wasm.js passed: warmup: face default
2021-09-17 14:06:51 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":23.6,"gender":"female"} {} {"score":0.47,"keypoints":4}
2021-09-17 14:06:51 DATA:  test-node-wasm.js result: performance: load: 4230 total: 1055
2021-09-17 14:06:54 STATE: test-node-wasm.js event: image
2021-09-17 14:06:55 STATE: test-node-wasm.js event: detect
2021-09-17 14:06:55 STATE: test-node-wasm.js event: warmup
2021-09-17 14:06:55 STATE: test-node-wasm.js passed: warmup: body default
2021-09-17 14:06:55 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-17 14:06:55 DATA:  test-node-wasm.js result: performance: load: 4230 total: 2630
2021-09-17 14:06:55 INFO:  test-node-wasm.js test default
2021-09-17 14:06:57 STATE: test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:06:58 STATE: test-node-wasm.js event: image
2021-09-17 14:06:59 STATE: test-node-wasm.js event: detect
2021-09-17 14:06:59 STATE: test-node-wasm.js passed: detect: samples/ai-body.jpg default
2021-09-17 14:06:59 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-17 14:06:59 DATA:  test-node-wasm.js result: performance: load: 4230 total: 2254
2021-09-17 14:06:59 INFO:  test-node-wasm.js test body variants
2021-09-17 14:07:01 STATE: test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:07:03 STATE: test-node-wasm.js event: image
2021-09-17 14:07:03 STATE: test-node-wasm.js event: detect
2021-09-17 14:07:03 STATE: test-node-wasm.js passed: detect: samples/ai-body.jpg posenet
2021-09-17 14:07:03 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.96,"keypoints":16}
2021-09-17 14:07:03 DATA:  test-node-wasm.js result: performance: load: 4230 total: 1877
2021-09-17 14:07:05 STATE: test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:07:06 STATE: test-node-wasm.js event: image
2021-09-17 14:07:07 STATE: test-node-wasm.js event: detect
2021-09-17 14:07:07 STATE: test-node-wasm.js passed: detect: samples/ai-body.jpg movenet
2021-09-17 14:07:07 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-17 14:07:07 DATA:  test-node-wasm.js result: performance: load: 4230 total: 1839
2021-09-17 14:07:08 STATE: test-node-wasm.js event: image
2021-09-17 14:07:08 STATE: test-node-wasm.js event: detect
2021-09-17 14:07:08 STATE: test-node-wasm.js passed: detect: random default
2021-09-17 14:07:08 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-17 14:07:08 DATA:  test-node-wasm.js result: performance: load: 4230 total: 1675
2021-09-17 14:07:08 INFO:  test-node-wasm.js test: first instance
2021-09-17 14:07:09 STATE: test-node-wasm.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-17 14:07:11 STATE: test-node-wasm.js event: image
2021-09-17 14:07:11 STATE: test-node-wasm.js event: detect
2021-09-17 14:07:11 STATE: test-node-wasm.js passed: detect: samples/ai-upper.jpg default
2021-09-17 14:07:11 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-17 14:07:11 DATA:  test-node-wasm.js result: performance: load: 4230 total: 1716
2021-09-17 14:07:11 INFO:  test-node-wasm.js test: second instance
2021-09-17 14:07:11 STATE: test-node-wasm.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-17 14:07:13 STATE: test-node-wasm.js passed: detect: samples/ai-upper.jpg default
2021-09-17 14:07:13 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-17 14:07:13 DATA:  test-node-wasm.js result: performance: load: 2 total: 2220
2021-09-17 14:07:13 INFO:  test-node-wasm.js test: concurrent
2021-09-17 14:07:14 STATE: test-node-wasm.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-09-17 14:07:14 STATE: test-node-wasm.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-09-17 14:07:15 STATE: test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:07:17 STATE: test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-17 14:07:17 STATE: test-node-wasm.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-17 14:07:18 STATE: test-node-wasm.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-17 14:07:20 STATE: test-node-wasm.js event: image
2021-09-17 14:07:23 STATE: test-node-wasm.js event: image
2021-09-17 14:07:26 STATE: test-node-wasm.js event: image
2021-09-17 14:07:28 STATE: test-node-wasm.js passed: detect: samples/ai-face.jpg default
2021-09-17 14:07:28 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-17 14:07:28 DATA:  test-node-wasm.js result: performance: load: 2 total: 10210
2021-09-17 14:07:28 STATE: test-node-wasm.js passed: detect: samples/ai-body.jpg default
2021-09-17 14:07:28 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-17 14:07:28 DATA:  test-node-wasm.js result: performance: load: 2 total: 10210
2021-09-17 14:07:28 STATE: test-node-wasm.js event: detect
2021-09-17 14:07:28 STATE: test-node-wasm.js event: detect
2021-09-17 14:07:28 STATE: test-node-wasm.js event: detect
2021-09-17 14:07:28 STATE: test-node-wasm.js passed: detect: samples/ai-face.jpg default
2021-09-17 14:07:28 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-17 14:07:28 DATA:  test-node-wasm.js result: performance: load: 4230 total: 10212
2021-09-17 14:07:28 STATE: test-node-wasm.js passed: detect: samples/ai-body.jpg default
2021-09-17 14:07:28 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-17 14:07:28 DATA:  test-node-wasm.js result: performance: load: 4230 total: 10212
2021-09-17 14:07:28 STATE: test-node-wasm.js passed: detect: samples/ai-upper.jpg default
2021-09-17 14:07:28 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-17 14:07:28 DATA:  test-node-wasm.js result: performance: load: 4230 total: 10212
2021-09-17 14:07:28 STATE: test-node-wasm.js passed: detect: samples/ai-upper.jpg default
2021-09-17 14:07:28 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"score":1,"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
2021-09-17 14:07:28 DATA:  test-node-wasm.js result: performance: load: 2 total: 10212
2021-09-17 14:07:28 STATE: test-node-wasm.js passed: no memory leak
2021-09-17 14:07:28 INFO:  test-node-wasm.js events: {"image":10,"detect":10,"warmup":2}
2021-09-17 14:07:28 INFO:  test-node-wasm.js test complete: 42385 ms
2021-09-17 14:07:28 INFO: 
2021-09-17 14:07:28 INFO:  status: {"passed":88,"failed":0}