initial automated browser tests

pull/193/head
Vladimir Mandic 2021-09-20 17:17:13 -04:00
parent 66428abc12
commit 8bed89e7d4
19 changed files with 5285 additions and 310 deletions

View File

@ -102,6 +102,10 @@ function mergeDeep(...objects) {
return prev; return prev;
}, {}); }, {});
} }
async function wait(time) {
const waiting = new Promise((resolve) => setTimeout(() => resolve(true), time));
await waiting;
}
// src/config.ts // src/config.ts
var config = { var config = {
@ -112,6 +116,7 @@ var config = {
async: true, async: true,
warmup: "full", warmup: "full",
cacheSensitivity: 0.75, cacheSensitivity: 0.75,
yield: false,
skipFrame: false, skipFrame: false,
filter: { filter: {
enabled: true, enabled: true,
@ -4448,7 +4453,7 @@ var fx;
function canvas(width, height) { function canvas(width, height) {
let c; let c;
if (env.browser) { if (env.browser) {
if (typeof OffscreenCanvas !== "undefined") { if (env.offscreen) {
c = new OffscreenCanvas(width, height); c = new OffscreenCanvas(width, height);
} else { } else {
c = document.createElement("canvas"); c = document.createElement("canvas");
@ -4471,6 +4476,8 @@ function process2(input, config3) {
throw new Error("input type is not recognized"); throw new Error("input type is not recognized");
} }
if (input instanceof tfjs_esm_exports.Tensor) { if (input instanceof tfjs_esm_exports.Tensor) {
if (input.isDisposed)
throw new Error("input tensor is disposed");
if (input.shape && input.shape.length === 4 && input.shape[0] === 1 && input.shape[3] === 3) if (input.shape && input.shape.length === 4 && input.shape[0] === 1 && input.shape[3] === 3)
tensor3 = tfjs_esm_exports.clone(input); tensor3 = tfjs_esm_exports.clone(input);
else else
@ -4652,6 +4659,7 @@ var env = {
agent: void 0, agent: void 0,
initial: true, initial: true,
backends: [], backends: [],
offscreen: void 0,
tfjs: { tfjs: {
version: void 0 version: void 0
}, },
@ -4707,6 +4715,7 @@ async function get() {
env.node = typeof process !== "undefined"; env.node = typeof process !== "undefined";
env.worker = env.browser ? typeof WorkerGlobalScope !== "undefined" : void 0; env.worker = env.browser ? typeof WorkerGlobalScope !== "undefined" : void 0;
env.tfjs.version = tfjs_esm_exports.version_core; env.tfjs.version = tfjs_esm_exports.version_core;
env.offscreen = typeof env.offscreen === "undefined" ? typeof OffscreenCanvas !== void 0 : env.offscreen;
if (typeof navigator !== "undefined") { if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g); const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) { if (raw && raw[0]) {
@ -4723,6 +4732,9 @@ async function get() {
} }
await backendInfo(); await backendInfo();
} }
async function set(obj) {
env = mergeDeep(env, obj);
}
// src/blazeface/facepipeline.ts // src/blazeface/facepipeline.ts
var leftOutline = MESH_ANNOTATIONS["leftEyeLower0"]; var leftOutline = MESH_ANNOTATIONS["leftEyeLower0"];
@ -11206,6 +11218,8 @@ function extensions() {
} }
async function register(instance) { async function register(instance) {
var _a; var _a;
if (instance.config.backend !== "humangl")
return;
if (config2.name in tfjs_esm_exports.engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) { if (config2.name in tfjs_esm_exports.engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) {
log("error: humangl backend invalid context"); log("error: humangl backend invalid context");
reset(instance); reset(instance);
@ -11243,11 +11257,12 @@ async function register(instance) {
log("error: cannot set WebGL context:", err); log("error: cannot set WebGL context:", err);
return; return;
} }
const current = tfjs_esm_exports.backend().getGPGPUContext().gl; const current = tfjs_esm_exports.backend().getGPGPUContext ? tfjs_esm_exports.backend().getGPGPUContext().gl : null;
if (current) { if (current) {
log(`humangl webgl version:${current.getParameter(current.VERSION)} renderer:${current.getParameter(current.RENDERER)}`); log(`humangl webgl version:${current.getParameter(current.VERSION)} renderer:${current.getParameter(current.RENDERER)}`);
} else { } else {
log("error: no current context:", current, config2.gl); log("error: no current gl context:", current, config2.gl);
return;
} }
try { try {
const ctx = new tfjs_esm_exports.GPGPUContext(config2.gl); const ctx = new tfjs_esm_exports.GPGPUContext(config2.gl);
@ -12313,6 +12328,7 @@ var Human = class {
} }
init() { init() {
check(this); check(this);
set(this.env);
} }
async load(userConfig) { async load(userConfig) {
this.state = "load"; this.state = "load";
@ -12355,6 +12371,8 @@ var Human = class {
return warmup(this, userConfig); return warmup(this, userConfig);
} }
async detect(input, userConfig) { async detect(input, userConfig) {
if (this.config.yield)
await wait(1);
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
this.state = "config"; this.state = "config";
@ -12370,6 +12388,8 @@ var Human = class {
const timeStart = now(); const timeStart = now();
await check(this); await check(this);
await this.load(); await this.load();
if (this.config.yield)
await wait(1);
timeStamp = now(); timeStamp = now();
let img = process2(input, this.config); let img = process2(input, this.config);
this.process = img; this.process = img;
@ -12411,12 +12431,12 @@ var Human = class {
let bodyRes = []; let bodyRes = [];
let handRes = []; let handRes = [];
let objectRes = []; let objectRes = [];
this.state = "run:face";
if (this.config.async) { if (this.config.async) {
faceRes = this.config.face.enabled ? detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? detectFace(this, img.tensor) : [];
if (this.performance.face) if (this.performance.face)
delete this.performance.face; delete this.performance.face;
} else { } else {
this.state = "run:face";
timeStamp = now(); timeStamp = now();
faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
@ -12424,6 +12444,7 @@ var Human = class {
this.performance.face = elapsedTime; this.performance.face = elapsedTime;
} }
this.analyze("Start Body:"); this.analyze("Start Body:");
this.state = "run:body";
if (this.config.async) { if (this.config.async) {
if ((_a = this.config.body.modelPath) == null ? void 0 : _a.includes("posenet")) if ((_a = this.config.body.modelPath) == null ? void 0 : _a.includes("posenet"))
bodyRes = this.config.body.enabled ? predict4(img.tensor, this.config) : []; bodyRes = this.config.body.enabled ? predict4(img.tensor, this.config) : [];
@ -12436,7 +12457,6 @@ var Human = class {
if (this.performance.body) if (this.performance.body)
delete this.performance.body; delete this.performance.body;
} else { } else {
this.state = "run:body";
timeStamp = now(); timeStamp = now();
if ((_e = this.config.body.modelPath) == null ? void 0 : _e.includes("posenet")) if ((_e = this.config.body.modelPath) == null ? void 0 : _e.includes("posenet"))
bodyRes = this.config.body.enabled ? await predict4(img.tensor, this.config) : []; bodyRes = this.config.body.enabled ? await predict4(img.tensor, this.config) : [];
@ -12452,12 +12472,12 @@ var Human = class {
} }
this.analyze("End Body:"); this.analyze("End Body:");
this.analyze("Start Hand:"); this.analyze("Start Hand:");
this.state = "run:hand";
if (this.config.async) { if (this.config.async) {
handRes = this.config.hand.enabled ? predict5(img.tensor, this.config) : []; handRes = this.config.hand.enabled ? predict5(img.tensor, this.config) : [];
if (this.performance.hand) if (this.performance.hand)
delete this.performance.hand; delete this.performance.hand;
} else { } else {
this.state = "run:hand";
timeStamp = now(); timeStamp = now();
handRes = this.config.hand.enabled ? await predict5(img.tensor, this.config) : []; handRes = this.config.hand.enabled ? await predict5(img.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
@ -12466,6 +12486,7 @@ var Human = class {
} }
this.analyze("End Hand:"); this.analyze("End Hand:");
this.analyze("Start Object:"); this.analyze("Start Object:");
this.state = "run:object";
if (this.config.async) { if (this.config.async) {
if ((_i = this.config.object.modelPath) == null ? void 0 : _i.includes("nanodet")) if ((_i = this.config.object.modelPath) == null ? void 0 : _i.includes("nanodet"))
objectRes = this.config.object.enabled ? predict9(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? predict9(img.tensor, this.config) : [];
@ -12474,7 +12495,6 @@ var Human = class {
if (this.performance.object) if (this.performance.object)
delete this.performance.object; delete this.performance.object;
} else { } else {
this.state = "run:object";
timeStamp = now(); timeStamp = now();
if ((_k = this.config.object.modelPath) == null ? void 0 : _k.includes("nanodet")) if ((_k = this.config.object.modelPath) == null ? void 0 : _k.includes("nanodet"))
objectRes = this.config.object.enabled ? await predict9(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? await predict9(img.tensor, this.config) : [];
@ -12485,8 +12505,12 @@ var Human = class {
this.performance.object = elapsedTime; this.performance.object = elapsedTime;
} }
this.analyze("End Object:"); this.analyze("End Object:");
this.state = "run:await";
if (this.config.yield)
await wait(1);
if (this.config.async) if (this.config.async)
[faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]); [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
this.state = "run:gesture";
let gestureRes = []; let gestureRes = [];
if (this.config.gesture.enabled) { if (this.config.gesture.enabled) {
timeStamp = now(); timeStamp = now();
@ -12497,7 +12521,6 @@ var Human = class {
delete this.performance.gesture; delete this.performance.gesture;
} }
this.performance.total = Math.trunc(now() - timeStart); this.performance.total = Math.trunc(now() - timeStart);
this.state = "idle";
const shape = ((_n = (_m = this.process) == null ? void 0 : _m.tensor) == null ? void 0 : _n.shape) || []; const shape = ((_n = (_m = this.process) == null ? void 0 : _m.tensor) == null ? void 0 : _n.shape) || [];
this.result = { this.result = {
face: faceRes, face: faceRes,
@ -12514,6 +12537,7 @@ var Human = class {
}; };
tfjs_esm_exports.dispose(img.tensor); tfjs_esm_exports.dispose(img.tensor);
this.emit("detect"); this.emit("detect");
this.state = "idle";
resolve(this.result); resolve(this.result);
}); });
} }

File diff suppressed because one or more lines are too long

40
dist/human.esm.js vendored
View File

@ -91,6 +91,10 @@ function mergeDeep(...objects) {
return prev; return prev;
}, {}); }, {});
} }
async function wait(time2) {
const waiting = new Promise((resolve) => setTimeout(() => resolve(true), time2));
await waiting;
}
// src/config.ts // src/config.ts
var config = { var config = {
@ -101,6 +105,7 @@ var config = {
async: true, async: true,
warmup: "full", warmup: "full",
cacheSensitivity: 0.75, cacheSensitivity: 0.75,
yield: false,
skipFrame: false, skipFrame: false,
filter: { filter: {
enabled: true, enabled: true,
@ -64505,7 +64510,7 @@ var fx;
function canvas(width, height) { function canvas(width, height) {
let c; let c;
if (env2.browser) { if (env2.browser) {
if (typeof OffscreenCanvas !== "undefined") { if (env2.offscreen) {
c = new OffscreenCanvas(width, height); c = new OffscreenCanvas(width, height);
} else { } else {
c = document.createElement("canvas"); c = document.createElement("canvas");
@ -64528,6 +64533,8 @@ function process2(input2, config3) {
throw new Error("input type is not recognized"); throw new Error("input type is not recognized");
} }
if (input2 instanceof Tensor) { if (input2 instanceof Tensor) {
if (input2.isDisposed)
throw new Error("input tensor is disposed");
if (input2.shape && input2.shape.length === 4 && input2.shape[0] === 1 && input2.shape[3] === 3) if (input2.shape && input2.shape.length === 4 && input2.shape[0] === 1 && input2.shape[3] === 3)
tensor2 = clone(input2); tensor2 = clone(input2);
else else
@ -64709,6 +64716,7 @@ var env2 = {
agent: void 0, agent: void 0,
initial: true, initial: true,
backends: [], backends: [],
offscreen: void 0,
tfjs: { tfjs: {
version: void 0 version: void 0
}, },
@ -64764,6 +64772,7 @@ async function get3() {
env2.node = typeof process !== "undefined"; env2.node = typeof process !== "undefined";
env2.worker = env2.browser ? typeof WorkerGlobalScope !== "undefined" : void 0; env2.worker = env2.browser ? typeof WorkerGlobalScope !== "undefined" : void 0;
env2.tfjs.version = version; env2.tfjs.version = version;
env2.offscreen = typeof env2.offscreen === "undefined" ? typeof OffscreenCanvas !== void 0 : env2.offscreen;
if (typeof navigator !== "undefined") { if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g); const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) { if (raw && raw[0]) {
@ -64780,6 +64789,9 @@ async function get3() {
} }
await backendInfo(); await backendInfo();
} }
async function set(obj) {
env2 = mergeDeep(env2, obj);
}
// src/blazeface/facepipeline.ts // src/blazeface/facepipeline.ts
var leftOutline = MESH_ANNOTATIONS["leftEyeLower0"]; var leftOutline = MESH_ANNOTATIONS["leftEyeLower0"];
@ -71263,6 +71275,8 @@ function extensions() {
} }
async function register(instance) { async function register(instance) {
var _a; var _a;
if (instance.config.backend !== "humangl")
return;
if (config2.name in engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) { if (config2.name in engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) {
log("error: humangl backend invalid context"); log("error: humangl backend invalid context");
reset(instance); reset(instance);
@ -71300,11 +71314,12 @@ async function register(instance) {
log("error: cannot set WebGL context:", err); log("error: cannot set WebGL context:", err);
return; return;
} }
const current = backend().getGPGPUContext().gl; const current = backend().getGPGPUContext ? backend().getGPGPUContext().gl : null;
if (current) { if (current) {
log(`humangl webgl version:${current.getParameter(current.VERSION)} renderer:${current.getParameter(current.RENDERER)}`); log(`humangl webgl version:${current.getParameter(current.VERSION)} renderer:${current.getParameter(current.RENDERER)}`);
} else { } else {
log("error: no current context:", current, config2.gl); log("error: no current gl context:", current, config2.gl);
return;
} }
try { try {
const ctx = new GPGPUContext(config2.gl); const ctx = new GPGPUContext(config2.gl);
@ -72370,6 +72385,7 @@ var Human = class {
} }
init() { init() {
check(this); check(this);
set(this.env);
} }
async load(userConfig) { async load(userConfig) {
this.state = "load"; this.state = "load";
@ -72412,6 +72428,8 @@ var Human = class {
return warmup(this, userConfig); return warmup(this, userConfig);
} }
async detect(input2, userConfig) { async detect(input2, userConfig) {
if (this.config.yield)
await wait(1);
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
this.state = "config"; this.state = "config";
@ -72427,6 +72445,8 @@ var Human = class {
const timeStart = now(); const timeStart = now();
await check(this); await check(this);
await this.load(); await this.load();
if (this.config.yield)
await wait(1);
timeStamp = now(); timeStamp = now();
let img = process2(input2, this.config); let img = process2(input2, this.config);
this.process = img; this.process = img;
@ -72468,12 +72488,12 @@ var Human = class {
let bodyRes = []; let bodyRes = [];
let handRes = []; let handRes = [];
let objectRes = []; let objectRes = [];
this.state = "run:face";
if (this.config.async) { if (this.config.async) {
faceRes = this.config.face.enabled ? detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? detectFace(this, img.tensor) : [];
if (this.performance.face) if (this.performance.face)
delete this.performance.face; delete this.performance.face;
} else { } else {
this.state = "run:face";
timeStamp = now(); timeStamp = now();
faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
@ -72481,6 +72501,7 @@ var Human = class {
this.performance.face = elapsedTime; this.performance.face = elapsedTime;
} }
this.analyze("Start Body:"); this.analyze("Start Body:");
this.state = "run:body";
if (this.config.async) { if (this.config.async) {
if ((_a = this.config.body.modelPath) == null ? void 0 : _a.includes("posenet")) if ((_a = this.config.body.modelPath) == null ? void 0 : _a.includes("posenet"))
bodyRes = this.config.body.enabled ? predict4(img.tensor, this.config) : []; bodyRes = this.config.body.enabled ? predict4(img.tensor, this.config) : [];
@ -72493,7 +72514,6 @@ var Human = class {
if (this.performance.body) if (this.performance.body)
delete this.performance.body; delete this.performance.body;
} else { } else {
this.state = "run:body";
timeStamp = now(); timeStamp = now();
if ((_e = this.config.body.modelPath) == null ? void 0 : _e.includes("posenet")) if ((_e = this.config.body.modelPath) == null ? void 0 : _e.includes("posenet"))
bodyRes = this.config.body.enabled ? await predict4(img.tensor, this.config) : []; bodyRes = this.config.body.enabled ? await predict4(img.tensor, this.config) : [];
@ -72509,12 +72529,12 @@ var Human = class {
} }
this.analyze("End Body:"); this.analyze("End Body:");
this.analyze("Start Hand:"); this.analyze("Start Hand:");
this.state = "run:hand";
if (this.config.async) { if (this.config.async) {
handRes = this.config.hand.enabled ? predict5(img.tensor, this.config) : []; handRes = this.config.hand.enabled ? predict5(img.tensor, this.config) : [];
if (this.performance.hand) if (this.performance.hand)
delete this.performance.hand; delete this.performance.hand;
} else { } else {
this.state = "run:hand";
timeStamp = now(); timeStamp = now();
handRes = this.config.hand.enabled ? await predict5(img.tensor, this.config) : []; handRes = this.config.hand.enabled ? await predict5(img.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
@ -72523,6 +72543,7 @@ var Human = class {
} }
this.analyze("End Hand:"); this.analyze("End Hand:");
this.analyze("Start Object:"); this.analyze("Start Object:");
this.state = "run:object";
if (this.config.async) { if (this.config.async) {
if ((_i = this.config.object.modelPath) == null ? void 0 : _i.includes("nanodet")) if ((_i = this.config.object.modelPath) == null ? void 0 : _i.includes("nanodet"))
objectRes = this.config.object.enabled ? predict9(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? predict9(img.tensor, this.config) : [];
@ -72531,7 +72552,6 @@ var Human = class {
if (this.performance.object) if (this.performance.object)
delete this.performance.object; delete this.performance.object;
} else { } else {
this.state = "run:object";
timeStamp = now(); timeStamp = now();
if ((_k = this.config.object.modelPath) == null ? void 0 : _k.includes("nanodet")) if ((_k = this.config.object.modelPath) == null ? void 0 : _k.includes("nanodet"))
objectRes = this.config.object.enabled ? await predict9(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? await predict9(img.tensor, this.config) : [];
@ -72542,8 +72562,12 @@ var Human = class {
this.performance.object = elapsedTime; this.performance.object = elapsedTime;
} }
this.analyze("End Object:"); this.analyze("End Object:");
this.state = "run:await";
if (this.config.yield)
await wait(1);
if (this.config.async) if (this.config.async)
[faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]); [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
this.state = "run:gesture";
let gestureRes = []; let gestureRes = [];
if (this.config.gesture.enabled) { if (this.config.gesture.enabled) {
timeStamp = now(); timeStamp = now();
@ -72554,7 +72578,6 @@ var Human = class {
delete this.performance.gesture; delete this.performance.gesture;
} }
this.performance.total = Math.trunc(now() - timeStart); this.performance.total = Math.trunc(now() - timeStart);
this.state = "idle";
const shape = ((_n = (_m = this.process) == null ? void 0 : _m.tensor) == null ? void 0 : _n.shape) || []; const shape = ((_n = (_m = this.process) == null ? void 0 : _m.tensor) == null ? void 0 : _n.shape) || [];
this.result = { this.result = {
face: faceRes, face: faceRes,
@ -72571,6 +72594,7 @@ var Human = class {
}; };
dispose(img.tensor); dispose(img.tensor);
this.emit("detect"); this.emit("detect");
this.state = "idle";
resolve(this.result); resolve(this.result);
}); });
} }

File diff suppressed because one or more lines are too long

496
dist/human.js vendored

File diff suppressed because one or more lines are too long

View File

@ -147,6 +147,10 @@ function mergeDeep(...objects) {
return prev; return prev;
}, {}); }, {});
} }
async function wait(time) {
const waiting = new Promise((resolve) => setTimeout(() => resolve(true), time));
await waiting;
}
// src/config.ts // src/config.ts
var config = { var config = {
@ -157,6 +161,7 @@ var config = {
async: true, async: true,
warmup: "full", warmup: "full",
cacheSensitivity: 0.75, cacheSensitivity: 0.75,
yield: false,
skipFrame: false, skipFrame: false,
filter: { filter: {
enabled: true, enabled: true,
@ -4482,7 +4487,7 @@ var fx;
function canvas(width, height) { function canvas(width, height) {
let c; let c;
if (env.browser) { if (env.browser) {
if (typeof OffscreenCanvas !== "undefined") { if (env.offscreen) {
c = new OffscreenCanvas(width, height); c = new OffscreenCanvas(width, height);
} else { } else {
c = document.createElement("canvas"); c = document.createElement("canvas");
@ -4505,6 +4510,8 @@ function process2(input, config3) {
throw new Error("input type is not recognized"); throw new Error("input type is not recognized");
} }
if (input instanceof tf3.Tensor) { if (input instanceof tf3.Tensor) {
if (input.isDisposed)
throw new Error("input tensor is disposed");
if (input.shape && input.shape.length === 4 && input.shape[0] === 1 && input.shape[3] === 3) if (input.shape && input.shape.length === 4 && input.shape[0] === 1 && input.shape[3] === 3)
tensor3 = tf3.clone(input); tensor3 = tf3.clone(input);
else else
@ -4686,6 +4693,7 @@ var env = {
agent: void 0, agent: void 0,
initial: true, initial: true,
backends: [], backends: [],
offscreen: void 0,
tfjs: { tfjs: {
version: void 0 version: void 0
}, },
@ -4741,6 +4749,7 @@ async function get() {
env.node = typeof process !== "undefined"; env.node = typeof process !== "undefined";
env.worker = env.browser ? typeof WorkerGlobalScope !== "undefined" : void 0; env.worker = env.browser ? typeof WorkerGlobalScope !== "undefined" : void 0;
env.tfjs.version = tf4.version_core; env.tfjs.version = tf4.version_core;
env.offscreen = typeof env.offscreen === "undefined" ? typeof OffscreenCanvas !== void 0 : env.offscreen;
if (typeof navigator !== "undefined") { if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g); const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) { if (raw && raw[0]) {
@ -4757,6 +4766,9 @@ async function get() {
} }
await backendInfo(); await backendInfo();
} }
async function set(obj) {
env = mergeDeep(env, obj);
}
// src/blazeface/facepipeline.ts // src/blazeface/facepipeline.ts
var leftOutline = MESH_ANNOTATIONS["leftEyeLower0"]; var leftOutline = MESH_ANNOTATIONS["leftEyeLower0"];
@ -11267,6 +11279,8 @@ function extensions() {
} }
async function register(instance) { async function register(instance) {
var _a; var _a;
if (instance.config.backend !== "humangl")
return;
if (config2.name in tf21.engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) { if (config2.name in tf21.engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) {
log("error: humangl backend invalid context"); log("error: humangl backend invalid context");
reset(instance); reset(instance);
@ -11304,11 +11318,12 @@ async function register(instance) {
log("error: cannot set WebGL context:", err); log("error: cannot set WebGL context:", err);
return; return;
} }
const current = tf21.backend().getGPGPUContext().gl; const current = tf21.backend().getGPGPUContext ? tf21.backend().getGPGPUContext().gl : null;
if (current) { if (current) {
log(`humangl webgl version:${current.getParameter(current.VERSION)} renderer:${current.getParameter(current.RENDERER)}`); log(`humangl webgl version:${current.getParameter(current.VERSION)} renderer:${current.getParameter(current.RENDERER)}`);
} else { } else {
log("error: no current context:", current, config2.gl); log("error: no current gl context:", current, config2.gl);
return;
} }
try { try {
const ctx = new tf21.GPGPUContext(config2.gl); const ctx = new tf21.GPGPUContext(config2.gl);
@ -12376,6 +12391,7 @@ var Human = class {
} }
init() { init() {
check(this); check(this);
set(this.env);
} }
async load(userConfig) { async load(userConfig) {
this.state = "load"; this.state = "load";
@ -12418,6 +12434,8 @@ var Human = class {
return warmup(this, userConfig); return warmup(this, userConfig);
} }
async detect(input, userConfig) { async detect(input, userConfig) {
if (this.config.yield)
await wait(1);
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
this.state = "config"; this.state = "config";
@ -12433,6 +12451,8 @@ var Human = class {
const timeStart = now(); const timeStart = now();
await check(this); await check(this);
await this.load(); await this.load();
if (this.config.yield)
await wait(1);
timeStamp = now(); timeStamp = now();
let img = process2(input, this.config); let img = process2(input, this.config);
this.process = img; this.process = img;
@ -12474,12 +12494,12 @@ var Human = class {
let bodyRes = []; let bodyRes = [];
let handRes = []; let handRes = [];
let objectRes = []; let objectRes = [];
this.state = "run:face";
if (this.config.async) { if (this.config.async) {
faceRes = this.config.face.enabled ? detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? detectFace(this, img.tensor) : [];
if (this.performance.face) if (this.performance.face)
delete this.performance.face; delete this.performance.face;
} else { } else {
this.state = "run:face";
timeStamp = now(); timeStamp = now();
faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
@ -12487,6 +12507,7 @@ var Human = class {
this.performance.face = elapsedTime; this.performance.face = elapsedTime;
} }
this.analyze("Start Body:"); this.analyze("Start Body:");
this.state = "run:body";
if (this.config.async) { if (this.config.async) {
if ((_a = this.config.body.modelPath) == null ? void 0 : _a.includes("posenet")) if ((_a = this.config.body.modelPath) == null ? void 0 : _a.includes("posenet"))
bodyRes = this.config.body.enabled ? predict4(img.tensor, this.config) : []; bodyRes = this.config.body.enabled ? predict4(img.tensor, this.config) : [];
@ -12499,7 +12520,6 @@ var Human = class {
if (this.performance.body) if (this.performance.body)
delete this.performance.body; delete this.performance.body;
} else { } else {
this.state = "run:body";
timeStamp = now(); timeStamp = now();
if ((_e = this.config.body.modelPath) == null ? void 0 : _e.includes("posenet")) if ((_e = this.config.body.modelPath) == null ? void 0 : _e.includes("posenet"))
bodyRes = this.config.body.enabled ? await predict4(img.tensor, this.config) : []; bodyRes = this.config.body.enabled ? await predict4(img.tensor, this.config) : [];
@ -12515,12 +12535,12 @@ var Human = class {
} }
this.analyze("End Body:"); this.analyze("End Body:");
this.analyze("Start Hand:"); this.analyze("Start Hand:");
this.state = "run:hand";
if (this.config.async) { if (this.config.async) {
handRes = this.config.hand.enabled ? predict5(img.tensor, this.config) : []; handRes = this.config.hand.enabled ? predict5(img.tensor, this.config) : [];
if (this.performance.hand) if (this.performance.hand)
delete this.performance.hand; delete this.performance.hand;
} else { } else {
this.state = "run:hand";
timeStamp = now(); timeStamp = now();
handRes = this.config.hand.enabled ? await predict5(img.tensor, this.config) : []; handRes = this.config.hand.enabled ? await predict5(img.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
@ -12529,6 +12549,7 @@ var Human = class {
} }
this.analyze("End Hand:"); this.analyze("End Hand:");
this.analyze("Start Object:"); this.analyze("Start Object:");
this.state = "run:object";
if (this.config.async) { if (this.config.async) {
if ((_i = this.config.object.modelPath) == null ? void 0 : _i.includes("nanodet")) if ((_i = this.config.object.modelPath) == null ? void 0 : _i.includes("nanodet"))
objectRes = this.config.object.enabled ? predict9(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? predict9(img.tensor, this.config) : [];
@ -12537,7 +12558,6 @@ var Human = class {
if (this.performance.object) if (this.performance.object)
delete this.performance.object; delete this.performance.object;
} else { } else {
this.state = "run:object";
timeStamp = now(); timeStamp = now();
if ((_k = this.config.object.modelPath) == null ? void 0 : _k.includes("nanodet")) if ((_k = this.config.object.modelPath) == null ? void 0 : _k.includes("nanodet"))
objectRes = this.config.object.enabled ? await predict9(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? await predict9(img.tensor, this.config) : [];
@ -12548,8 +12568,12 @@ var Human = class {
this.performance.object = elapsedTime; this.performance.object = elapsedTime;
} }
this.analyze("End Object:"); this.analyze("End Object:");
this.state = "run:await";
if (this.config.yield)
await wait(1);
if (this.config.async) if (this.config.async)
[faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]); [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
this.state = "run:gesture";
let gestureRes = []; let gestureRes = [];
if (this.config.gesture.enabled) { if (this.config.gesture.enabled) {
timeStamp = now(); timeStamp = now();
@ -12560,7 +12584,6 @@ var Human = class {
delete this.performance.gesture; delete this.performance.gesture;
} }
this.performance.total = Math.trunc(now() - timeStart); this.performance.total = Math.trunc(now() - timeStart);
this.state = "idle";
const shape = ((_n = (_m = this.process) == null ? void 0 : _m.tensor) == null ? void 0 : _n.shape) || []; const shape = ((_n = (_m = this.process) == null ? void 0 : _m.tensor) == null ? void 0 : _n.shape) || [];
this.result = { this.result = {
face: faceRes, face: faceRes,
@ -12577,6 +12600,7 @@ var Human = class {
}; };
tf24.dispose(img.tensor); tf24.dispose(img.tensor);
this.emit("detect"); this.emit("detect");
this.state = "idle";
resolve(this.result); resolve(this.result);
}); });
} }

View File

@ -148,6 +148,10 @@ function mergeDeep(...objects) {
return prev; return prev;
}, {}); }, {});
} }
async function wait(time) {
const waiting = new Promise((resolve) => setTimeout(() => resolve(true), time));
await waiting;
}
// src/config.ts // src/config.ts
var config = { var config = {
@ -158,6 +162,7 @@ var config = {
async: true, async: true,
warmup: "full", warmup: "full",
cacheSensitivity: 0.75, cacheSensitivity: 0.75,
yield: false,
skipFrame: false, skipFrame: false,
filter: { filter: {
enabled: true, enabled: true,
@ -4483,7 +4488,7 @@ var fx;
function canvas(width, height) { function canvas(width, height) {
let c; let c;
if (env.browser) { if (env.browser) {
if (typeof OffscreenCanvas !== "undefined") { if (env.offscreen) {
c = new OffscreenCanvas(width, height); c = new OffscreenCanvas(width, height);
} else { } else {
c = document.createElement("canvas"); c = document.createElement("canvas");
@ -4506,6 +4511,8 @@ function process2(input, config3) {
throw new Error("input type is not recognized"); throw new Error("input type is not recognized");
} }
if (input instanceof tf3.Tensor) { if (input instanceof tf3.Tensor) {
if (input.isDisposed)
throw new Error("input tensor is disposed");
if (input.shape && input.shape.length === 4 && input.shape[0] === 1 && input.shape[3] === 3) if (input.shape && input.shape.length === 4 && input.shape[0] === 1 && input.shape[3] === 3)
tensor3 = tf3.clone(input); tensor3 = tf3.clone(input);
else else
@ -4687,6 +4694,7 @@ var env = {
agent: void 0, agent: void 0,
initial: true, initial: true,
backends: [], backends: [],
offscreen: void 0,
tfjs: { tfjs: {
version: void 0 version: void 0
}, },
@ -4742,6 +4750,7 @@ async function get() {
env.node = typeof process !== "undefined"; env.node = typeof process !== "undefined";
env.worker = env.browser ? typeof WorkerGlobalScope !== "undefined" : void 0; env.worker = env.browser ? typeof WorkerGlobalScope !== "undefined" : void 0;
env.tfjs.version = tf4.version_core; env.tfjs.version = tf4.version_core;
env.offscreen = typeof env.offscreen === "undefined" ? typeof OffscreenCanvas !== void 0 : env.offscreen;
if (typeof navigator !== "undefined") { if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g); const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) { if (raw && raw[0]) {
@ -4758,6 +4767,9 @@ async function get() {
} }
await backendInfo(); await backendInfo();
} }
async function set(obj) {
env = mergeDeep(env, obj);
}
// src/blazeface/facepipeline.ts // src/blazeface/facepipeline.ts
var leftOutline = MESH_ANNOTATIONS["leftEyeLower0"]; var leftOutline = MESH_ANNOTATIONS["leftEyeLower0"];
@ -11268,6 +11280,8 @@ function extensions() {
} }
async function register(instance) { async function register(instance) {
var _a; var _a;
if (instance.config.backend !== "humangl")
return;
if (config2.name in tf21.engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) { if (config2.name in tf21.engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) {
log("error: humangl backend invalid context"); log("error: humangl backend invalid context");
reset(instance); reset(instance);
@ -11305,11 +11319,12 @@ async function register(instance) {
log("error: cannot set WebGL context:", err); log("error: cannot set WebGL context:", err);
return; return;
} }
const current = tf21.backend().getGPGPUContext().gl; const current = tf21.backend().getGPGPUContext ? tf21.backend().getGPGPUContext().gl : null;
if (current) { if (current) {
log(`humangl webgl version:${current.getParameter(current.VERSION)} renderer:${current.getParameter(current.RENDERER)}`); log(`humangl webgl version:${current.getParameter(current.VERSION)} renderer:${current.getParameter(current.RENDERER)}`);
} else { } else {
log("error: no current context:", current, config2.gl); log("error: no current gl context:", current, config2.gl);
return;
} }
try { try {
const ctx = new tf21.GPGPUContext(config2.gl); const ctx = new tf21.GPGPUContext(config2.gl);
@ -12377,6 +12392,7 @@ var Human = class {
} }
init() { init() {
check(this); check(this);
set(this.env);
} }
async load(userConfig) { async load(userConfig) {
this.state = "load"; this.state = "load";
@ -12419,6 +12435,8 @@ var Human = class {
return warmup(this, userConfig); return warmup(this, userConfig);
} }
async detect(input, userConfig) { async detect(input, userConfig) {
if (this.config.yield)
await wait(1);
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
this.state = "config"; this.state = "config";
@ -12434,6 +12452,8 @@ var Human = class {
const timeStart = now(); const timeStart = now();
await check(this); await check(this);
await this.load(); await this.load();
if (this.config.yield)
await wait(1);
timeStamp = now(); timeStamp = now();
let img = process2(input, this.config); let img = process2(input, this.config);
this.process = img; this.process = img;
@ -12475,12 +12495,12 @@ var Human = class {
let bodyRes = []; let bodyRes = [];
let handRes = []; let handRes = [];
let objectRes = []; let objectRes = [];
this.state = "run:face";
if (this.config.async) { if (this.config.async) {
faceRes = this.config.face.enabled ? detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? detectFace(this, img.tensor) : [];
if (this.performance.face) if (this.performance.face)
delete this.performance.face; delete this.performance.face;
} else { } else {
this.state = "run:face";
timeStamp = now(); timeStamp = now();
faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
@ -12488,6 +12508,7 @@ var Human = class {
this.performance.face = elapsedTime; this.performance.face = elapsedTime;
} }
this.analyze("Start Body:"); this.analyze("Start Body:");
this.state = "run:body";
if (this.config.async) { if (this.config.async) {
if ((_a = this.config.body.modelPath) == null ? void 0 : _a.includes("posenet")) if ((_a = this.config.body.modelPath) == null ? void 0 : _a.includes("posenet"))
bodyRes = this.config.body.enabled ? predict4(img.tensor, this.config) : []; bodyRes = this.config.body.enabled ? predict4(img.tensor, this.config) : [];
@ -12500,7 +12521,6 @@ var Human = class {
if (this.performance.body) if (this.performance.body)
delete this.performance.body; delete this.performance.body;
} else { } else {
this.state = "run:body";
timeStamp = now(); timeStamp = now();
if ((_e = this.config.body.modelPath) == null ? void 0 : _e.includes("posenet")) if ((_e = this.config.body.modelPath) == null ? void 0 : _e.includes("posenet"))
bodyRes = this.config.body.enabled ? await predict4(img.tensor, this.config) : []; bodyRes = this.config.body.enabled ? await predict4(img.tensor, this.config) : [];
@ -12516,12 +12536,12 @@ var Human = class {
} }
this.analyze("End Body:"); this.analyze("End Body:");
this.analyze("Start Hand:"); this.analyze("Start Hand:");
this.state = "run:hand";
if (this.config.async) { if (this.config.async) {
handRes = this.config.hand.enabled ? predict5(img.tensor, this.config) : []; handRes = this.config.hand.enabled ? predict5(img.tensor, this.config) : [];
if (this.performance.hand) if (this.performance.hand)
delete this.performance.hand; delete this.performance.hand;
} else { } else {
this.state = "run:hand";
timeStamp = now(); timeStamp = now();
handRes = this.config.hand.enabled ? await predict5(img.tensor, this.config) : []; handRes = this.config.hand.enabled ? await predict5(img.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
@ -12530,6 +12550,7 @@ var Human = class {
} }
this.analyze("End Hand:"); this.analyze("End Hand:");
this.analyze("Start Object:"); this.analyze("Start Object:");
this.state = "run:object";
if (this.config.async) { if (this.config.async) {
if ((_i = this.config.object.modelPath) == null ? void 0 : _i.includes("nanodet")) if ((_i = this.config.object.modelPath) == null ? void 0 : _i.includes("nanodet"))
objectRes = this.config.object.enabled ? predict9(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? predict9(img.tensor, this.config) : [];
@ -12538,7 +12559,6 @@ var Human = class {
if (this.performance.object) if (this.performance.object)
delete this.performance.object; delete this.performance.object;
} else { } else {
this.state = "run:object";
timeStamp = now(); timeStamp = now();
if ((_k = this.config.object.modelPath) == null ? void 0 : _k.includes("nanodet")) if ((_k = this.config.object.modelPath) == null ? void 0 : _k.includes("nanodet"))
objectRes = this.config.object.enabled ? await predict9(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? await predict9(img.tensor, this.config) : [];
@ -12549,8 +12569,12 @@ var Human = class {
this.performance.object = elapsedTime; this.performance.object = elapsedTime;
} }
this.analyze("End Object:"); this.analyze("End Object:");
this.state = "run:await";
if (this.config.yield)
await wait(1);
if (this.config.async) if (this.config.async)
[faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]); [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
this.state = "run:gesture";
let gestureRes = []; let gestureRes = [];
if (this.config.gesture.enabled) { if (this.config.gesture.enabled) {
timeStamp = now(); timeStamp = now();
@ -12561,7 +12585,6 @@ var Human = class {
delete this.performance.gesture; delete this.performance.gesture;
} }
this.performance.total = Math.trunc(now() - timeStart); this.performance.total = Math.trunc(now() - timeStart);
this.state = "idle";
const shape = ((_n = (_m = this.process) == null ? void 0 : _m.tensor) == null ? void 0 : _n.shape) || []; const shape = ((_n = (_m = this.process) == null ? void 0 : _m.tensor) == null ? void 0 : _n.shape) || [];
this.result = { this.result = {
face: faceRes, face: faceRes,
@ -12578,6 +12601,7 @@ var Human = class {
}; };
tf24.dispose(img.tensor); tf24.dispose(img.tensor);
this.emit("detect"); this.emit("detect");
this.state = "idle";
resolve(this.result); resolve(this.result);
}); });
} }

40
dist/human.node.js vendored
View File

@ -147,6 +147,10 @@ function mergeDeep(...objects) {
return prev; return prev;
}, {}); }, {});
} }
async function wait(time) {
const waiting = new Promise((resolve) => setTimeout(() => resolve(true), time));
await waiting;
}
// src/config.ts // src/config.ts
var config = { var config = {
@ -157,6 +161,7 @@ var config = {
async: true, async: true,
warmup: "full", warmup: "full",
cacheSensitivity: 0.75, cacheSensitivity: 0.75,
yield: false,
skipFrame: false, skipFrame: false,
filter: { filter: {
enabled: true, enabled: true,
@ -4482,7 +4487,7 @@ var fx;
function canvas(width, height) { function canvas(width, height) {
let c; let c;
if (env.browser) { if (env.browser) {
if (typeof OffscreenCanvas !== "undefined") { if (env.offscreen) {
c = new OffscreenCanvas(width, height); c = new OffscreenCanvas(width, height);
} else { } else {
c = document.createElement("canvas"); c = document.createElement("canvas");
@ -4505,6 +4510,8 @@ function process2(input, config3) {
throw new Error("input type is not recognized"); throw new Error("input type is not recognized");
} }
if (input instanceof tf3.Tensor) { if (input instanceof tf3.Tensor) {
if (input.isDisposed)
throw new Error("input tensor is disposed");
if (input.shape && input.shape.length === 4 && input.shape[0] === 1 && input.shape[3] === 3) if (input.shape && input.shape.length === 4 && input.shape[0] === 1 && input.shape[3] === 3)
tensor3 = tf3.clone(input); tensor3 = tf3.clone(input);
else else
@ -4686,6 +4693,7 @@ var env = {
agent: void 0, agent: void 0,
initial: true, initial: true,
backends: [], backends: [],
offscreen: void 0,
tfjs: { tfjs: {
version: void 0 version: void 0
}, },
@ -4741,6 +4749,7 @@ async function get() {
env.node = typeof process !== "undefined"; env.node = typeof process !== "undefined";
env.worker = env.browser ? typeof WorkerGlobalScope !== "undefined" : void 0; env.worker = env.browser ? typeof WorkerGlobalScope !== "undefined" : void 0;
env.tfjs.version = tf4.version_core; env.tfjs.version = tf4.version_core;
env.offscreen = typeof env.offscreen === "undefined" ? typeof OffscreenCanvas !== void 0 : env.offscreen;
if (typeof navigator !== "undefined") { if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g); const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) { if (raw && raw[0]) {
@ -4757,6 +4766,9 @@ async function get() {
} }
await backendInfo(); await backendInfo();
} }
async function set(obj) {
env = mergeDeep(env, obj);
}
// src/blazeface/facepipeline.ts // src/blazeface/facepipeline.ts
var leftOutline = MESH_ANNOTATIONS["leftEyeLower0"]; var leftOutline = MESH_ANNOTATIONS["leftEyeLower0"];
@ -11267,6 +11279,8 @@ function extensions() {
} }
async function register(instance) { async function register(instance) {
var _a; var _a;
if (instance.config.backend !== "humangl")
return;
if (config2.name in tf21.engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) { if (config2.name in tf21.engine().registry && (!config2.gl || !config2.gl.getParameter(config2.gl.VERSION))) {
log("error: humangl backend invalid context"); log("error: humangl backend invalid context");
reset(instance); reset(instance);
@ -11304,11 +11318,12 @@ async function register(instance) {
log("error: cannot set WebGL context:", err); log("error: cannot set WebGL context:", err);
return; return;
} }
const current = tf21.backend().getGPGPUContext().gl; const current = tf21.backend().getGPGPUContext ? tf21.backend().getGPGPUContext().gl : null;
if (current) { if (current) {
log(`humangl webgl version:${current.getParameter(current.VERSION)} renderer:${current.getParameter(current.RENDERER)}`); log(`humangl webgl version:${current.getParameter(current.VERSION)} renderer:${current.getParameter(current.RENDERER)}`);
} else { } else {
log("error: no current context:", current, config2.gl); log("error: no current gl context:", current, config2.gl);
return;
} }
try { try {
const ctx = new tf21.GPGPUContext(config2.gl); const ctx = new tf21.GPGPUContext(config2.gl);
@ -12376,6 +12391,7 @@ var Human = class {
} }
init() { init() {
check(this); check(this);
set(this.env);
} }
async load(userConfig) { async load(userConfig) {
this.state = "load"; this.state = "load";
@ -12418,6 +12434,8 @@ var Human = class {
return warmup(this, userConfig); return warmup(this, userConfig);
} }
async detect(input, userConfig) { async detect(input, userConfig) {
if (this.config.yield)
await wait(1);
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
this.state = "config"; this.state = "config";
@ -12433,6 +12451,8 @@ var Human = class {
const timeStart = now(); const timeStart = now();
await check(this); await check(this);
await this.load(); await this.load();
if (this.config.yield)
await wait(1);
timeStamp = now(); timeStamp = now();
let img = process2(input, this.config); let img = process2(input, this.config);
this.process = img; this.process = img;
@ -12474,12 +12494,12 @@ var Human = class {
let bodyRes = []; let bodyRes = [];
let handRes = []; let handRes = [];
let objectRes = []; let objectRes = [];
this.state = "run:face";
if (this.config.async) { if (this.config.async) {
faceRes = this.config.face.enabled ? detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? detectFace(this, img.tensor) : [];
if (this.performance.face) if (this.performance.face)
delete this.performance.face; delete this.performance.face;
} else { } else {
this.state = "run:face";
timeStamp = now(); timeStamp = now();
faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? await detectFace(this, img.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
@ -12487,6 +12507,7 @@ var Human = class {
this.performance.face = elapsedTime; this.performance.face = elapsedTime;
} }
this.analyze("Start Body:"); this.analyze("Start Body:");
this.state = "run:body";
if (this.config.async) { if (this.config.async) {
if ((_a = this.config.body.modelPath) == null ? void 0 : _a.includes("posenet")) if ((_a = this.config.body.modelPath) == null ? void 0 : _a.includes("posenet"))
bodyRes = this.config.body.enabled ? predict4(img.tensor, this.config) : []; bodyRes = this.config.body.enabled ? predict4(img.tensor, this.config) : [];
@ -12499,7 +12520,6 @@ var Human = class {
if (this.performance.body) if (this.performance.body)
delete this.performance.body; delete this.performance.body;
} else { } else {
this.state = "run:body";
timeStamp = now(); timeStamp = now();
if ((_e = this.config.body.modelPath) == null ? void 0 : _e.includes("posenet")) if ((_e = this.config.body.modelPath) == null ? void 0 : _e.includes("posenet"))
bodyRes = this.config.body.enabled ? await predict4(img.tensor, this.config) : []; bodyRes = this.config.body.enabled ? await predict4(img.tensor, this.config) : [];
@ -12515,12 +12535,12 @@ var Human = class {
} }
this.analyze("End Body:"); this.analyze("End Body:");
this.analyze("Start Hand:"); this.analyze("Start Hand:");
this.state = "run:hand";
if (this.config.async) { if (this.config.async) {
handRes = this.config.hand.enabled ? predict5(img.tensor, this.config) : []; handRes = this.config.hand.enabled ? predict5(img.tensor, this.config) : [];
if (this.performance.hand) if (this.performance.hand)
delete this.performance.hand; delete this.performance.hand;
} else { } else {
this.state = "run:hand";
timeStamp = now(); timeStamp = now();
handRes = this.config.hand.enabled ? await predict5(img.tensor, this.config) : []; handRes = this.config.hand.enabled ? await predict5(img.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
@ -12529,6 +12549,7 @@ var Human = class {
} }
this.analyze("End Hand:"); this.analyze("End Hand:");
this.analyze("Start Object:"); this.analyze("Start Object:");
this.state = "run:object";
if (this.config.async) { if (this.config.async) {
if ((_i = this.config.object.modelPath) == null ? void 0 : _i.includes("nanodet")) if ((_i = this.config.object.modelPath) == null ? void 0 : _i.includes("nanodet"))
objectRes = this.config.object.enabled ? predict9(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? predict9(img.tensor, this.config) : [];
@ -12537,7 +12558,6 @@ var Human = class {
if (this.performance.object) if (this.performance.object)
delete this.performance.object; delete this.performance.object;
} else { } else {
this.state = "run:object";
timeStamp = now(); timeStamp = now();
if ((_k = this.config.object.modelPath) == null ? void 0 : _k.includes("nanodet")) if ((_k = this.config.object.modelPath) == null ? void 0 : _k.includes("nanodet"))
objectRes = this.config.object.enabled ? await predict9(img.tensor, this.config) : []; objectRes = this.config.object.enabled ? await predict9(img.tensor, this.config) : [];
@ -12548,8 +12568,12 @@ var Human = class {
this.performance.object = elapsedTime; this.performance.object = elapsedTime;
} }
this.analyze("End Object:"); this.analyze("End Object:");
this.state = "run:await";
if (this.config.yield)
await wait(1);
if (this.config.async) if (this.config.async)
[faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]); [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
this.state = "run:gesture";
let gestureRes = []; let gestureRes = [];
if (this.config.gesture.enabled) { if (this.config.gesture.enabled) {
timeStamp = now(); timeStamp = now();
@ -12560,7 +12584,6 @@ var Human = class {
delete this.performance.gesture; delete this.performance.gesture;
} }
this.performance.total = Math.trunc(now() - timeStart); this.performance.total = Math.trunc(now() - timeStart);
this.state = "idle";
const shape = ((_n = (_m = this.process) == null ? void 0 : _m.tensor) == null ? void 0 : _n.shape) || []; const shape = ((_n = (_m = this.process) == null ? void 0 : _m.tensor) == null ? void 0 : _n.shape) || [];
this.result = { this.result = {
face: faceRes, face: faceRes,
@ -12577,6 +12600,7 @@ var Human = class {
}; };
tf24.dispose(img.tensor); tf24.dispose(img.tensor);
this.emit("detect"); this.emit("detect");
this.state = "idle";
resolve(this.result); resolve(this.result);
}); });
} }

View File

@ -24,7 +24,7 @@
"start": "node --no-warnings demo/nodejs/node.js", "start": "node --no-warnings demo/nodejs/node.js",
"dev": "build --profile development", "dev": "build --profile development",
"build": "rimraf test/build.log && build --profile production", "build": "rimraf test/build.log && build --profile production",
"test": "node --no-warnings --unhandled-rejections=strict --trace-uncaught test/test.js", "test": "node --no-warnings --unhandled-rejections=strict --trace-uncaught test/node.js",
"lint": "eslint src demo test", "lint": "eslint src demo test",
"scan": "npx auditjs@latest ossi --dev --quiet" "scan": "npx auditjs@latest ossi --dev --quiet"
}, },

View File

@ -216,10 +216,10 @@ export interface Config {
*/ */
cacheSensitivity: number; cacheSensitivity: number;
/** Cache sensitivity /** Yield to main thread periodically */
* - values 0..1 where 0.01 means reset cache if input changed more than 1% yield: boolean;
* - set to 0 to disable caching
*/ /** Internal Variable */
skipFrame: boolean; skipFrame: boolean;
/** Run input through image filters before inference /** Run input through image filters before inference
@ -262,6 +262,7 @@ const config: Config = {
cacheSensitivity: 0.75, // cache sensitivity cacheSensitivity: 0.75, // cache sensitivity
// values 0..1 where 0.01 means reset cache if input changed more than 1% // values 0..1 where 0.01 means reset cache if input changed more than 1%
// set to 0 to disable caching // set to 0 to disable caching
yield: false, // yield to main thread periodically
skipFrame: false, // internal & dynamic skipFrame: false, // internal & dynamic
filter: { // run input through image filters before inference filter: { // run input through image filters before inference
// image filters run with near-zero latency as they are executed on the GPU // image filters run with near-zero latency as they are executed on the GPU

View File

@ -1,7 +1,8 @@
import * as tf from '../dist/tfjs.esm.js'; import * as tf from '../dist/tfjs.esm.js';
import * as image from './image/image'; import * as image from './image/image';
import { mergeDeep } from './helpers';
export interface Env { export type Env = {
browser: undefined | boolean, browser: undefined | boolean,
node: undefined | boolean, node: undefined | boolean,
worker: undefined | boolean, worker: undefined | boolean,
@ -12,6 +13,7 @@ export interface Env {
tfjs: { tfjs: {
version: undefined | string, version: undefined | string,
}, },
offscreen: undefined | boolean,
wasm: { wasm: {
supported: undefined | boolean, supported: undefined | boolean,
backend: undefined | boolean, backend: undefined | boolean,
@ -34,7 +36,8 @@ export interface Env {
Image: undefined, Image: undefined,
} }
export const env: Env = { // eslint-disable-next-line import/no-mutable-exports
export let env: Env = {
browser: undefined, browser: undefined,
node: undefined, node: undefined,
worker: undefined, worker: undefined,
@ -42,6 +45,7 @@ export const env: Env = {
agent: undefined, agent: undefined,
initial: true, initial: true,
backends: [], backends: [],
offscreen: undefined,
tfjs: { tfjs: {
version: undefined, version: undefined,
}, },
@ -127,6 +131,8 @@ export async function get() {
env.worker = env.browser ? (typeof WorkerGlobalScope !== 'undefined') : undefined; env.worker = env.browser ? (typeof WorkerGlobalScope !== 'undefined') : undefined;
env.tfjs.version = tf.version_core; env.tfjs.version = tf.version_core;
// offscreencanvas supported?
env.offscreen = typeof env.offscreen === 'undefined' ? typeof OffscreenCanvas !== undefined : env.offscreen;
// get platform and agent // get platform and agent
if (typeof navigator !== 'undefined') { if (typeof navigator !== 'undefined') {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g); const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
@ -141,9 +147,12 @@ export async function get() {
env.platform = `${process.platform} ${process.arch}`; env.platform = `${process.platform} ${process.arch}`;
env.agent = `NodeJS ${process.version}`; env.agent = `NodeJS ${process.version}`;
} }
await backendInfo(); await backendInfo();
// get cpu info // get cpu info
// await cpuInfo(); // await cpuInfo();
} }
export async function set(obj) {
env = mergeDeep(env, obj);
}

View File

@ -63,3 +63,9 @@ export const minmax = (data: Array<number>) => data.reduce((acc: Array<number>,
acc[1] = (acc[1] === undefined || val > acc[1]) ? val : acc[1]; acc[1] = (acc[1] === undefined || val > acc[1]) ? val : acc[1];
return acc; return acc;
}, []); }, []);
// helper function: async wait
export async function wait(time) {
const waiting = new Promise((resolve) => setTimeout(() => resolve(true), time));
await waiting;
}

View File

@ -2,7 +2,7 @@
* Human main module * Human main module
*/ */
import { log, now, mergeDeep, validate } from './helpers'; import { log, now, mergeDeep, validate, wait } from './helpers';
import { Config, defaults } from './config'; import { Config, defaults } from './config';
import type { Result, FaceResult, HandResult, BodyResult, ObjectResult, GestureResult, PersonResult } from './result'; import type { Result, FaceResult, HandResult, BodyResult, ObjectResult, GestureResult, PersonResult } from './result';
import * as tf from '../dist/tfjs.esm.js'; import * as tf from '../dist/tfjs.esm.js';
@ -35,7 +35,7 @@ import type { DrawOptions } from './draw';
export * from './config'; export * from './config';
export * from './result'; export * from './result';
export type { DrawOptions } from './draw'; export type { DrawOptions } from './draw';
export { env } from './env'; export { env, Env } from './env';
/** Defines all possible input types for **Human** detection /** Defines all possible input types for **Human** detection
* @typedef Input Type * @typedef Input Type
@ -320,6 +320,7 @@ export class Human {
*/ */
init() { init() {
backend.check(this); backend.check(this);
env.set(this.env);
} }
/** Load method preloads all configured models on-demand /** Load method preloads all configured models on-demand
@ -395,6 +396,7 @@ export class Human {
*/ */
async detect(input: Input, userConfig?: Partial<Config>): Promise<Result | Error> { async detect(input: Input, userConfig?: Partial<Config>): Promise<Result | Error> {
// detection happens inside a promise // detection happens inside a promise
if (this.config.yield) await wait(1);
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
this.state = 'config'; this.state = 'config';
let timeStamp; let timeStamp;
@ -419,6 +421,7 @@ export class Human {
// load models if enabled // load models if enabled
await this.load(); await this.load();
if (this.config.yield) await wait(1);
timeStamp = now(); timeStamp = now();
let img = image.process(input, this.config); let img = image.process(input, this.config);
this.process = img; this.process = img;
@ -465,11 +468,11 @@ export class Human {
let objectRes: ObjectResult[] | Promise<ObjectResult[]> | never[] = []; let objectRes: ObjectResult[] | Promise<ObjectResult[]> | never[] = [];
// run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
this.state = 'run:face';
if (this.config.async) { if (this.config.async) {
faceRes = this.config.face.enabled ? face.detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? face.detectFace(this, img.tensor) : [];
if (this.performance.face) delete this.performance.face; if (this.performance.face) delete this.performance.face;
} else { } else {
this.state = 'run:face';
timeStamp = now(); timeStamp = now();
faceRes = this.config.face.enabled ? await face.detectFace(this, img.tensor) : []; faceRes = this.config.face.enabled ? await face.detectFace(this, img.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
@ -478,6 +481,7 @@ export class Human {
// run body: can be posenet, blazepose, efficientpose, movenet // run body: can be posenet, blazepose, efficientpose, movenet
this.analyze('Start Body:'); this.analyze('Start Body:');
this.state = 'run:body';
if (this.config.async) { if (this.config.async) {
if (this.config.body.modelPath?.includes('posenet')) bodyRes = this.config.body.enabled ? posenet.predict(img.tensor, this.config) : []; if (this.config.body.modelPath?.includes('posenet')) bodyRes = this.config.body.enabled ? posenet.predict(img.tensor, this.config) : [];
else if (this.config.body.modelPath?.includes('blazepose')) bodyRes = this.config.body.enabled ? blazepose.predict(img.tensor, this.config) : []; else if (this.config.body.modelPath?.includes('blazepose')) bodyRes = this.config.body.enabled ? blazepose.predict(img.tensor, this.config) : [];
@ -485,7 +489,6 @@ export class Human {
else if (this.config.body.modelPath?.includes('movenet')) bodyRes = this.config.body.enabled ? movenet.predict(img.tensor, this.config) : []; else if (this.config.body.modelPath?.includes('movenet')) bodyRes = this.config.body.enabled ? movenet.predict(img.tensor, this.config) : [];
if (this.performance.body) delete this.performance.body; if (this.performance.body) delete this.performance.body;
} else { } else {
this.state = 'run:body';
timeStamp = now(); timeStamp = now();
if (this.config.body.modelPath?.includes('posenet')) bodyRes = this.config.body.enabled ? await posenet.predict(img.tensor, this.config) : []; if (this.config.body.modelPath?.includes('posenet')) bodyRes = this.config.body.enabled ? await posenet.predict(img.tensor, this.config) : [];
else if (this.config.body.modelPath?.includes('blazepose')) bodyRes = this.config.body.enabled ? await blazepose.predict(img.tensor, this.config) : []; else if (this.config.body.modelPath?.includes('blazepose')) bodyRes = this.config.body.enabled ? await blazepose.predict(img.tensor, this.config) : [];
@ -498,11 +501,11 @@ export class Human {
// run handpose // run handpose
this.analyze('Start Hand:'); this.analyze('Start Hand:');
this.state = 'run:hand';
if (this.config.async) { if (this.config.async) {
handRes = this.config.hand.enabled ? handpose.predict(img.tensor, this.config) : []; handRes = this.config.hand.enabled ? handpose.predict(img.tensor, this.config) : [];
if (this.performance.hand) delete this.performance.hand; if (this.performance.hand) delete this.performance.hand;
} else { } else {
this.state = 'run:hand';
timeStamp = now(); timeStamp = now();
handRes = this.config.hand.enabled ? await handpose.predict(img.tensor, this.config) : []; handRes = this.config.hand.enabled ? await handpose.predict(img.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
@ -512,12 +515,12 @@ export class Human {
// run nanodet // run nanodet
this.analyze('Start Object:'); this.analyze('Start Object:');
this.state = 'run:object';
if (this.config.async) { if (this.config.async) {
if (this.config.object.modelPath?.includes('nanodet')) objectRes = this.config.object.enabled ? nanodet.predict(img.tensor, this.config) : []; if (this.config.object.modelPath?.includes('nanodet')) objectRes = this.config.object.enabled ? nanodet.predict(img.tensor, this.config) : [];
else if (this.config.object.modelPath?.includes('centernet')) objectRes = this.config.object.enabled ? centernet.predict(img.tensor, this.config) : []; else if (this.config.object.modelPath?.includes('centernet')) objectRes = this.config.object.enabled ? centernet.predict(img.tensor, this.config) : [];
if (this.performance.object) delete this.performance.object; if (this.performance.object) delete this.performance.object;
} else { } else {
this.state = 'run:object';
timeStamp = now(); timeStamp = now();
if (this.config.object.modelPath?.includes('nanodet')) objectRes = this.config.object.enabled ? await nanodet.predict(img.tensor, this.config) : []; if (this.config.object.modelPath?.includes('nanodet')) objectRes = this.config.object.enabled ? await nanodet.predict(img.tensor, this.config) : [];
else if (this.config.object.modelPath?.includes('centernet')) objectRes = this.config.object.enabled ? await centernet.predict(img.tensor, this.config) : []; else if (this.config.object.modelPath?.includes('centernet')) objectRes = this.config.object.enabled ? await centernet.predict(img.tensor, this.config) : [];
@ -527,9 +530,12 @@ export class Human {
this.analyze('End Object:'); this.analyze('End Object:');
// if async wait for results // if async wait for results
this.state = 'run:await';
if (this.config.yield) await wait(1);
if (this.config.async) [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]); if (this.config.async) [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
// run gesture analysis last // run gesture analysis last
this.state = 'run:gesture';
let gestureRes: GestureResult[] = []; let gestureRes: GestureResult[] = [];
if (this.config.gesture.enabled) { if (this.config.gesture.enabled) {
timeStamp = now(); timeStamp = now();
@ -539,7 +545,6 @@ export class Human {
} }
this.performance.total = Math.trunc(now() - timeStart); this.performance.total = Math.trunc(now() - timeStart);
this.state = 'idle';
const shape = this.process?.tensor?.shape || []; const shape = this.process?.tensor?.shape || [];
this.result = { this.result = {
face: faceRes as FaceResult[], face: faceRes as FaceResult[],
@ -558,6 +563,7 @@ export class Human {
// log('Result:', result); // log('Result:', result);
this.emit('detect'); this.emit('detect');
this.state = 'idle';
resolve(this.result); resolve(this.result);
}); });
} }

View File

@ -21,7 +21,7 @@ let fx: fxImage.GLImageFilter | null; // instance of imagefx
export function canvas(width, height): HTMLCanvasElement | OffscreenCanvas { export function canvas(width, height): HTMLCanvasElement | OffscreenCanvas {
let c; let c;
if (env.browser) { if (env.browser) {
if (typeof OffscreenCanvas !== 'undefined') { if (env.offscreen) {
c = new OffscreenCanvas(width, height); c = new OffscreenCanvas(width, height);
} else { } else {
c = document.createElement('canvas'); c = document.createElement('canvas');
@ -63,6 +63,7 @@ export function process(input: Input, config: Config): { tensor: Tensor | null,
} }
if (input instanceof tf.Tensor) { if (input instanceof tf.Tensor) {
// if input is tensor, use as-is // if input is tensor, use as-is
if (input.isDisposed) throw new Error('input tensor is disposed');
if ((input as unknown as Tensor).shape && (input as unknown as Tensor).shape.length === 4 && (input as unknown as Tensor).shape[0] === 1 && (input as unknown as Tensor).shape[3] === 3) tensor = tf.clone(input); if ((input as unknown as Tensor).shape && (input as unknown as Tensor).shape.length === 4 && (input as unknown as Tensor).shape[0] === 1 && (input as unknown as Tensor).shape[3] === 3) tensor = tf.clone(input);
else throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${(input as unknown as Tensor).shape}`); else throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${(input as unknown as Tensor).shape}`);
} else { } else {

View File

@ -45,6 +45,7 @@ function extensions(): void {
*/ */
export async function register(instance): Promise<void> { export async function register(instance): Promise<void> {
// force backend reload if gl context is not valid // force backend reload if gl context is not valid
if (instance.config.backend !== 'humangl') return;
if ((config.name in tf.engine().registry) && (!config.gl || !config.gl.getParameter(config.gl.VERSION))) { if ((config.name in tf.engine().registry) && (!config.gl || !config.gl.getParameter(config.gl.VERSION))) {
log('error: humangl backend invalid context'); log('error: humangl backend invalid context');
models.reset(instance); models.reset(instance);
@ -95,11 +96,12 @@ export async function register(instance): Promise<void> {
log('error: cannot set WebGL context:', err); log('error: cannot set WebGL context:', err);
return; return;
} }
const current = tf.backend().getGPGPUContext().gl; const current = tf.backend().getGPGPUContext ? tf.backend().getGPGPUContext().gl : null;
if (current) { if (current) {
log(`humangl webgl version:${current.getParameter(current.VERSION)} renderer:${current.getParameter(current.RENDERER)}`); log(`humangl webgl version:${current.getParameter(current.VERSION)} renderer:${current.getParameter(current.RENDERER)}`);
} else { } else {
log('error: no current context:', current, config.gl); log('error: no current gl context:', current, config.gl);
return;
} }
try { try {
const ctx = new tf.GPGPUContext(config.gl); const ctx = new tf.GPGPUContext(config.gl);

146
test/browser.html Normal file
View File

@ -0,0 +1,146 @@
<!DOCTYPE html>
<html lang="en">
<head>
<title>Human Browser Tests</title>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="viewport" content="width=device-width, shrink-to-fit=yes">
<meta name="keywords" content="Human">
<meta name="application-name" content="Human">
<meta name="description" content="Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction, Emotion Prediction & Gesture Recognition; Author: Vladimir Mandic <https://github.com/vladmandic>">
<meta name="msapplication-tooltip" content="Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction, Emotion Prediction & Gesture Recognition; Author: Vladimir Mandic <https://github.com/vladmandic>">
<link rel="shortcut icon" href="../../favicon.ico" type="image/x-icon">
<link rel="apple-touch-icon" href="../../assets/icon.png">
<style>
@font-face { font-family: 'Lato'; font-display: swap; font-style: normal; font-weight: 100; src: local('Lato'), url('../../assets/lato-light.woff2') }
html { font-family: 'Lato', 'Segoe UI'; font-size: 14px; font-variant: small-caps; }
body { margin: 0; background: black; color: white; }
canvas { position: absolute; bottom: 10px; right: 10px; width: 256px; height: 256px; }
pre { line-height: 150%; }
.events { position: absolute; top: 10px; right: 10px; width: 12rem; height: 1.25rem; background-color: grey; padding: 8px; }
.state { position: absolute; top: 60px; right: 10px; width: 12rem; height: 1.25rem; background-color: grey; padding: 8px; }
</style>
</head>
<body>
<pre id="log"></pre>
<div id="events" class="events"></div>
<div id="state" class="state"></div>
<script type="module">
import Human from '../dist/human.esm.js';
// Human configuration shared by every test run: async pipeline, full warmup,
// verbose logging, frame caching disabled, and object detection enabled.
const config = {
  async: true,
  warmup: 'full',
  debug: true,
  cacheSensitivity: 0,
  object: { enabled: true },
}; // fix: terminate the declaration explicitly instead of relying on ASI
// Backends exercised in sequence by main(); swap in the single-entry list below to test one backend.
const backends = ['wasm', 'webgl', 'humangl'];
// const backends = ['humangl'];
// Reference timestamp so event timings are reported relative to script start.
const start = performance.now();
// Format a list of log arguments into a single printable line.
// Objects are JSON-stringified and lightly prettified (quotes stripped, a
// space added after commas and colons); primitives are appended as-is.
// Note: `msg` is a rest parameter and therefore always an array, so the
// original `if (!Array.isArray(msg)) return msg;` guard was unreachable
// and has been removed.
function str(...msg) {
  let line = '';
  for (const entry of msg) {
    if (typeof entry === 'object') line += JSON.stringify(entry).replace(/"/g, '').replace(/,/g, ', ').replace(/:/g, ': ');
    else line += entry;
  }
  return line + '\n';
}
// Append the formatted message to the on-page log element, then echo the raw
// arguments to the browser console.
async function log(...msgs) {
  const line = str(...msgs);
  document.getElementById('log').innerHTML += line;
  console.log(...msgs);
}
// Create an <img> element for the given URL and resolve once it has loaded.
async function image(url) {
  const el = document.createElement('img');
  el.id = 'image';
  await new Promise((resolve) => {
    el.onload = () => resolve(true);
    el.src = url;
  });
  return el;
}
// Pause execution for the given number of milliseconds.
async function wait(time) {
  await new Promise((resolve) => setTimeout(resolve, time));
}
// Display the most recent Human event name, stamped with elapsed time since
// script start, in the status element.
async function events(event) {
  const elapsed = Math.round(performance.now() - start);
  document.getElementById('events').innerText = `${elapsed}ms Event: ${event}`;
}
// Browser integration test driver: for each configured backend, creates a fresh
// Human instance, loads and warms up the models, runs detection on sample
// images, then benchmarks detect() across several cacheSensitivity values.
async function main() {
log('human tests');
let res;
let human = new Human(config);
// poll the library state every 10ms and mirror it into the status element
setInterval(() => { document.getElementById('state').innerText = `State: ${human.state}`; }, 10);
log({ version: human.version });
log({ env: human.env });
log({ config: human.config });
for (const backend of backends) {
log('');
log('test start:', backend);
human.config.backend = backend;
// fresh instance per backend so state from the previous run cannot leak over
human = new Human(config);
human.events.addEventListener('warmup', () => events('warmup'));
human.events.addEventListener('image', () => events('image'));
human.events.addEventListener('detect', () => events('detect'));
await human.load();
// force the non-offscreen/non-initial code paths before (re)initializing
// NOTE(review): presumably this exercises backend re-registration on an already-initialized env — confirm
human.env.offscreen = false;
human.env.initial = false;
await human.init();
log({ tfjs: human.tf.version.tfjs, backend: human.tf.getBackend() });
// report which models actually loaded (null means not loaded)
const models = Object.keys(human.models).map((model) => ({ name: model, loaded: (human.models[model] !== null) }));
log({ models: { models }});
log({ memory: human.tf.engine().state });
res = await human.validate();
log({ validate: res });
res = await human.warmup();
log({ warmup: res });
// full detection pass on a body sample, fed as a pre-processed tensor
let img = await image('../../samples/ai-body.jpg');
const input = await human.image(img);
// NOTE(review): res here is still the warmup result, so this shows the warmup canvas — confirm intended
let node = document.body.appendChild(res.canvas);
await wait(100);
log({ input });
res = await human.detect(input.tensor);
log({ detect: res});
const interpolated = human.next();
log({ interpolated });
const persons = res.persons;
log({ persons: { persons } });
log({ summary: { persons: persons.length, face: res.face.length, body: res.body.length, hand: res.hand.length, object: res.object.length, gesture: res.gesture.length }});
log({ performance: human.performance });
// caller owns the tensor returned by human.image(), so dispose it here
human.tf.dispose(input.tensor);
document.body.removeChild(node);
await wait(100);
// benchmark pass on a face sample across increasing cacheSensitivity values
img = await image('../../samples/ai-face.jpg');
human.reset();
human.config.backend = backend;
for (const val of [0, 0.25, 0.5, 0.75, 10]) {
// clear accumulated timings so each benchmark reports only its own runs
human.performance = {};
const t0 = performance.now();
for (let i = 0; i < 10; i++) {
// vary the pixelate filter each iteration to defeat input caching
res = await human.detect(img, { cacheSensitivity: val, filter: { pixelate: 5 * i } });
node = document.body.appendChild(res.canvas);
}
const t1 = performance.now();
log({ benchmark: { time: Math.round((t1 - t0) / 10), cacheSensitivity: val }, performance: human.performance });
await wait(100);
}
// NOTE(review): only the last appended canvas is removed; earlier ones from the loop remain in the DOM — confirm intended
document.body.removeChild(node);
log({ memory: human.tf.engine().state });
}
log('');
log('tests complete');
}
main();
</script>
</body>
</html>

File diff suppressed because it is too large Load Diff

28
test/wasm-error.js Normal file
View File

@ -0,0 +1,28 @@
const fs = require('fs');
const tf = require('@tensorflow/tfjs');
const wasm = require('@tensorflow/tfjs-backend-wasm');

// Reproduces a tfjs wasm-backend summation discrepancy: sums the same byte
// array in plain JS, as an int32 tensor, and as a float32 tensor at doubling
// sizes (2^0 .. 2^22) and logs whether the int32 tensor sum matches JS.
async function main() {
  wasm.setWasmPaths('node_modules/@tensorflow/tfjs-backend-wasm/dist/');
  await tf.setBackend('wasm');
  await tf.ready();
  console.log('tfjs:', { version: tf.version_core, backend: tf.getBackend() });
  const t = {};
  // arbitrary large binary payload used as deterministic test data
  const data = fs.readFileSync('dist/tfjs.esm.js.map');
  for (let i = 0; i <= 22; i++) {
    const arr = Array.from(data);
    const size = 2 ** i;
    arr.length = size; // truncate the byte array to exactly 2^i elements
    t.i32 = tf.tensor(arr, [size], 'int32');
    t.f32 = tf.tensor(arr, [size], 'float32');
    t.sumI = tf.sum(t.i32);
    t.sumF = tf.sum(t.f32);
    const JS = arr.reduce((prev, curr) => prev + curr, 0); // reference sum in plain JS
    const I32 = t.sumI.dataSync()[0];
    const F32 = t.sumF.dataSync()[0];
    console.log({ size, JS, I32, F32, ok: JS === I32 });
    // fix: dispose the tensor values, not the key names — the original passed
    // Object.keys() strings to tf.dispose, which is a no-op and leaked all
    // four tensors on every iteration
    Object.values(t).forEach((tensor) => tf.dispose(tensor));
  }
}
main();