refactor human.env to a class type

pull/233/head
Vladimir Mandic 2021-10-21 10:26:44 -04:00
parent 0688a0cebf
commit d4188518db
19 changed files with 4408 additions and 1499 deletions

View File

@ -89,7 +89,7 @@ const ui = {
autoPlay: false, // start webcam & detection on load autoPlay: false, // start webcam & detection on load
// internal variables // internal variables
exceptionHandler: true, // should capture all unhandled exceptions exceptionHandler: false, // should capture all unhandled exceptions
busy: false, // internal camera busy flag busy: false, // internal camera busy flag
menuWidth: 0, // internal menuWidth: 0, // internal
menuHeight: 0, // internal menuHeight: 0, // internal
@ -168,13 +168,15 @@ function log(...msg) {
if (ui.console) console.log(ts, ...msg); if (ui.console) console.log(ts, ...msg);
} }
let prevStatus = '';
function status(msg) { function status(msg) {
const div = document.getElementById('status'); const div = document.getElementById('status');
if (div && msg && msg.length > 0) { if (div && msg && msg !== prevStatus && msg.length > 0) {
log('status', msg); log('status', msg);
document.getElementById('play').style.display = 'none'; document.getElementById('play').style.display = 'none';
document.getElementById('loader').style.display = 'block'; document.getElementById('loader').style.display = 'block';
div.innerText = msg; div.innerText = msg;
prevStatus = msg;
} else { } else {
const video = document.getElementById('video'); const video = document.getElementById('video');
const playing = (video.srcObject !== null) && !video.paused; const playing = (video.srcObject !== null) && !video.paused;

View File

@ -12,6 +12,7 @@ import webRTC from '../helpers/webrtc.js'; // handle webrtc handshake and connec
const config = { // use default values for everything just specify models location const config = { // use default values for everything just specify models location
modelBasePath: '../../models', modelBasePath: '../../models',
backend: 'humangl',
}; };
const human = new Human(config); const human = new Human(config);

View File

@ -688,7 +688,7 @@ __export(tfjs_esm_exports, {
variableGrads: () => variableGrads, variableGrads: () => variableGrads,
version: () => version8, version: () => version8,
version_converter: () => version3, version_converter: () => version3,
version_core: () => version, version_core: () => version_core,
version_cpu: () => version5, version_cpu: () => version5,
version_layers: () => version2, version_layers: () => version2,
version_wasm: () => version7, version_wasm: () => version7,
@ -10414,7 +10414,6 @@ function encodeStrings(a) {
} }
return a; return a;
} }
var version = "0.0.0";
function enableProdMode() { function enableProdMode() {
env().set("PROD", true); env().set("PROD", true);
} }
@ -46161,7 +46160,7 @@ ENV3.registerFlag("WEBGL_USE_SHAPES_UNIFORMS", () => false);
ENV3.registerFlag("TOPK_LAST_DIM_CPU_HANDOFF_SIZE_THRESHOLD", () => 1e5); ENV3.registerFlag("TOPK_LAST_DIM_CPU_HANDOFF_SIZE_THRESHOLD", () => 1e5);
ENV3.registerFlag("TOPK_K_CPU_HANDOFF_THRESHOLD", () => 128); ENV3.registerFlag("TOPK_K_CPU_HANDOFF_THRESHOLD", () => 128);
function getGlslDifferences() { function getGlslDifferences() {
let version92; let version9;
let attribute; let attribute;
let varyingVs; let varyingVs;
let varyingFs; let varyingFs;
@ -46172,7 +46171,7 @@ function getGlslDifferences() {
let defineSpecialInf; let defineSpecialInf;
let defineRound; let defineRound;
if (env().getNumber("WEBGL_VERSION") === 2) { if (env().getNumber("WEBGL_VERSION") === 2) {
version92 = "#version 300 es"; version9 = "#version 300 es";
attribute = "in"; attribute = "in";
varyingVs = "out"; varyingVs = "out";
varyingFs = "in"; varyingFs = "in";
@ -46203,7 +46202,7 @@ function getGlslDifferences() {
} }
`; `;
} else { } else {
version92 = ""; version9 = "";
attribute = "attribute"; attribute = "attribute";
varyingVs = "varying"; varyingVs = "varying";
varyingFs = "varying"; varyingFs = "varying";
@ -46240,7 +46239,7 @@ function getGlslDifferences() {
`; `;
} }
return { return {
version: version92, version: version9,
attribute, attribute,
varyingVs, varyingVs,
varyingFs, varyingFs,
@ -55283,6 +55282,7 @@ var fromPixelsConfig = {
var fromPixels2DContext2; var fromPixels2DContext2;
function fromPixels2(args) { function fromPixels2(args) {
const { inputs, backend: backend3, attrs } = args; const { inputs, backend: backend3, attrs } = args;
console.log("webgl fromPixels args", args);
let { pixels } = inputs; let { pixels } = inputs;
const { numChannels } = attrs; const { numChannels } = attrs;
const isVideo = typeof HTMLVideoElement !== "undefined" && pixels instanceof HTMLVideoElement; const isVideo = typeof HTMLVideoElement !== "undefined" && pixels instanceof HTMLVideoElement;
@ -61417,6 +61417,7 @@ var {
stringNGramsImpl: stringNGramsImplCPU2, stringNGramsImpl: stringNGramsImplCPU2,
subImpl: subImplCPU2, subImpl: subImplCPU2,
tileImpl: tileImplCPU2, tileImpl: tileImplCPU2,
topKImpl: topKImplCPU2,
transposeImpl: transposeImplCPU2, transposeImpl: transposeImplCPU2,
uniqueImpl: uniqueImplCPU2 uniqueImpl: uniqueImplCPU2
} = shared_exports; } = shared_exports;
@ -63999,7 +64000,7 @@ function fromPixels3(args) {
} }
const isVideo = typeof HTMLVideoElement !== "undefined" && pixels instanceof HTMLVideoElement; const isVideo = typeof HTMLVideoElement !== "undefined" && pixels instanceof HTMLVideoElement;
const isImage = typeof HTMLImageElement !== "undefined" && pixels instanceof HTMLImageElement; const isImage = typeof HTMLImageElement !== "undefined" && pixels instanceof HTMLImageElement;
const isCanvas = typeof HTMLCanvasElement !== "undefined" && pixels instanceof HTMLCanvasElement; const isCanvas = typeof HTMLCanvasElement !== "undefined" && pixels instanceof HTMLCanvasElement || typeof OffscreenCanvas !== "undefined" && pixels instanceof OffscreenCanvas;
const isImageBitmap = typeof ImageBitmap !== "undefined" && pixels instanceof ImageBitmap; const isImageBitmap = typeof ImageBitmap !== "undefined" && pixels instanceof ImageBitmap;
const [width, height] = isVideo ? [ const [width, height] = isVideo ? [
pixels.videoWidth, pixels.videoWidth,
@ -64355,12 +64356,6 @@ function gatherV23(args) {
const { x, indices } = inputs; const { x, indices } = inputs;
const { axis, batchDims } = attrs; const { axis, batchDims } = attrs;
const parsedAxis = util_exports.parseAxisParam(axis, x.shape)[0]; const parsedAxis = util_exports.parseAxisParam(axis, x.shape)[0];
const indicesVals = backend3.readSync(indices.dataId);
const axisDim = x.shape[parsedAxis];
for (let i = 0; i < indicesVals.length; ++i) {
const index = indicesVals[i];
util_exports.assert(index <= axisDim - 1 && index >= 0, () => `GatherV2: the index value ${index} is not in [0, ${axisDim - 1}]`);
}
const shapeInfo = backend_util_exports.segment_util.collectGatherOpShapeInfo(x, indices, parsedAxis, batchDims); const shapeInfo = backend_util_exports.segment_util.collectGatherOpShapeInfo(x, indices, parsedAxis, batchDims);
const indicesSize = util_exports.sizeFromShape(indices.shape); const indicesSize = util_exports.sizeFromShape(indices.shape);
const toDispose = []; const toDispose = [];
@ -65673,6 +65668,272 @@ var tileConfig3 = {
backendName: "webgpu", backendName: "webgpu",
kernelFunc: tile5 kernelFunc: tile5
}; };
// WebGPU program for the swap (local compare-and-exchange) phase of the
// bitonic top-k algorithm: each shader invocation compares one pair of
// candidate indices within a group of size 2*inc and emits the index that
// should come first for the current sort direction.
// NOTE(review): bundle-generated code (esbuild); the `2` suffix avoids
// collisions with same-named webgl-backend classes earlier in the bundle.
var SwapProgram2 = class {
  // shape: [batch, elemCount] — output shape for this swap pass.
  constructor(shape) {
    this.variableNames = ["x", "indices"];
    this.workGroupSize = [256, 1, 1];
    this.outputShape = shape;
    this.dispatchLayout = flatDispatchLayout(this.outputShape);
    this.dispatch = computeDispatch(this.dispatchLayout, this.outputShape, this.workGroupSize);
    // Uniforms consumed by the WGSL below. `firstPass` selects whether
    // candidate indices are implicit (identity) or read from `indices`.
    this.uniforms = `inputSize : i32; firstPass : i32; negativeInf : f32;
dir : i32; inc : i32;`;
    this.shaderKey = "swap";
    this.size = util_exports.sizeFromShape(this.outputShape);
  }
  // Returns the WGSL source for one bitonic compare-and-swap step.
  getUserCode() {
    const userCode = `
${getMainHeaderString()} {
${getGlobalIndexString()}
if (index < uniforms.size) {
let outC = getOutputCoords(globalId, index);
let batch = outC[0];
let elemIdx = outC[1];
// We compare elements pair-wise within a group of size 2 * inc.
// The comparing rule for each group alternates between ascending
// and descending. Within each group, we compare each pair at
// positions i and i+inc. To decide whether an element at position i
// is x0 or x1, we mod it by 2 * inc, if the result is smaller than
// inc, it is in the first half of the group, we denote it as x0,
// otherwise we denote it as x1.
// For example, as shown in the Bitonic top K paper referenced
// above, Figure5(a) shows that element[1] is in the second half of
// the group when group size is 2, but it is in the first half of
// the group when group size is 4.
let isFirstInPair = elemIdx % (2 * uniforms.inc) < uniforms.inc;
var i = 0;
if (isFirstInPair) {
i = elemIdx;
} else {
i = elemIdx - uniforms.inc;
}
var i0 = 0;
if (uniforms.firstPass == 1) {
i0 = i;
} else {
i0 = i32(getIndices(batch, i));
}
var i1 = 0;
if (uniforms.firstPass == 1) {
i1 = i + uniforms.inc;
} else {
i1 = i32(getIndices(batch, i + uniforms.inc));
}
var x0 = f32(0.0);
var x1 = f32(0.0);
if (i0 < uniforms.inputSize) {
x0 = getX(batch, i0);
} else {
x0 = uniforms.negativeInf;
}
if (i1 < uniforms.inputSize) {
x1 = getX(batch, i1);
} else {
x1 = uniforms.negativeInf;
}
let reverse = elemIdx % (2 * uniforms.dir) >= uniforms.dir;
let isGreater = x0 > x1 || (x0 == x1 && i1 > i0);
if (reverse == isGreater) {
// Elements in opposite order of direction
let iTemp = i0;
i0 = i1;
i1 = iTemp;
}
if (isFirstInPair) {
setOutputFlat(index, f32(i0));
} else {
setOutputFlat(index, f32(i1));
}
}
}
`;
    return userCode;
  }
};
// WebGPU program for the merge (halving) phase of the bitonic top-k
// algorithm: each invocation picks the larger of two candidates so the
// candidate sequence shrinks by half per pass until only kPow2 remain.
var MergeProgram2 = class {
  // shape: [batch, elemCount] — output shape (half of the previous pass).
  constructor(shape) {
    this.variableNames = ["x", "indices"];
    this.workGroupSize = [256, 1, 1];
    this.outputShape = shape;
    this.dispatchLayout = flatDispatchLayout(this.outputShape);
    this.dispatch = computeDispatch(this.dispatchLayout, this.outputShape, this.workGroupSize);
    // `firstPass` selects implicit identity indices vs. the `indices` input,
    // mirroring SwapProgram2 above.
    this.uniforms = `inputSize : i32; firstPass : i32; k : i32;`;
    this.shaderKey = "merge";
    this.size = util_exports.sizeFromShape(this.outputShape);
  }
  // Returns the WGSL source for one bitonic merge step.
  getUserCode() {
    const userCode = `
${getMainHeaderString()} {
${getGlobalIndexString()}
if (index < uniforms.size) {
let outC = getOutputCoords(globalId, index);
let batch = outC[0];
let elemIdx = outC[1];
// The output size is half of the previous size.
// If the previous sequence is | | | | _ _ _ _ | | | | _ _ _ _
// (k=4), we only need to output the indices at positions |, the
// indices at positions _ can be thrown away, see Figure5(b) After
// Phase 2 (Merge phase) in the Bitonic Top K paper referenced
// above.
// For example, the paper shows we only need to output the orange
// bars. The output sequence should look like this | | | | | | | |.
// Because the sequence is halved, to map the output index back to
// the previous sequence to find the corresponding value, we need
// to double the index. When we double the index, we basically
// interpolate a position, so 2i looks like
// | _ | _ | _ | _ | _ | _ | _. We move the | to the first k
// position of each 2k positions by - elemIdx % k. E.g. for output
// at index 4,5,6,7, we want to get the corresponding element at
// original index 8,9,10,11, for output at index 8,9,10,11,
// we want to get the corresponding element at original index
// 16,17,18,19, so on and so forth.
var i = 0;
if (elemIdx < uniforms.k) {
i = elemIdx;
} else {
i = elemIdx * 2 - elemIdx % uniforms.k;
}
var i0 = 0;
if (uniforms.firstPass == 1) {
i0 = i;
} else {
i0 = i32(getIndices(batch, i));
}
var i1 = 0;
if (uniforms.firstPass == 1) {
i1 = i + uniforms.k;
} else {
i1 = i32(getIndices(batch, i + uniforms.k));
}
let x0 = getX(batch, i0);
var x1 = f32(0.0);
if (i1 < uniforms.inputSize) {
x1 = getX(batch, i1);
} else {
x1 = x0;
}
if (x0 >= x1) {
setOutputFlat(index, f32(i0));
} else {
setOutputFlat(index, f32(i1));
}
}
}
`;
    return userCode;
  }
};
// Releases the backing data of an intermediate tensor info, tolerating the
// `null` placeholder used before the first bitonic pass produces indices.
function disposeIntermediateTensorInfoOrNull2(backend3, tensorInfo) {
  if (tensorInfo === null) return;
  backend3.disposeData(tensorInfo.dataId);
}
// Smallest power of two that is >= num (yields 1 for num <= 1).
// Uses multiplication rather than bit shifts so values beyond 2^31 still
// round correctly.
function roundUpToPow22(num) {
  let candidate;
  for (candidate = 1; candidate < num; candidate *= 2);
  return candidate;
}
// TopK kernel for the webgpu backend, implemented as a bitonic top-k:
// a local-sort (swap) stage over groups of size kPow2, then alternating
// merge/swap passes until only kPow2 candidates per batch remain, followed
// by a slice to k and a gather of the corresponding values.
// Returns [values, indices] tensor infos shaped like x with the last
// dimension replaced by k.
function topK3(args) {
  const { inputs, backend: backend3, attrs } = args;
  const { x } = inputs;
  const { k, sorted } = attrs;
  const xShape = x.shape;
  const lastDim = xShape[xShape.length - 1];
  // Small inputs are handed off to the shared CPU implementation.
  if (backend3.shouldExecuteOnCPU([x])) {
    const xVals = backend3.readSync(x.dataId);
    const [allTopKVals, allTopKIndices] = topKImplCPU2(xVals, xShape, x.dtype, k, sorted);
    return [
      backend3.makeTensorInfo(allTopKVals.shape, allTopKVals.dtype, allTopKVals.values),
      backend3.makeTensorInfo(allTopKIndices.shape, allTopKIndices.dtype, allTopKIndices.values)
    ];
  }
  if (k === 0) {
    // NOTE(review): this mutates x.shape in place (xShape aliases it) —
    // confirm upstream intent before relying on x.shape afterwards.
    xShape[xShape.length - 1] = 0;
    return [
      backend3.makeTensorInfo(xShape, x.dtype, []),
      backend3.makeTensorInfo(xShape, "int32", [])
    ];
  }
  if (lastDim === 1) {
    // Only one candidate per row: values are x itself, indices are all zero.
    return [
      x,
      fill4({ attrs: { shape: xShape, dtype: "int32", value: 0 }, backend: backend3 })
    ];
  }
  // Flatten to 2D [batch, lastDim]; all passes operate row-wise.
  const xSize = util_exports.sizeFromShape(xShape);
  const batch = xSize / lastDim;
  const x2D = reshape5({ inputs: { x }, attrs: { shape: [batch, lastDim] }, backend: backend3 });
  const kPow2 = roundUpToPow22(k);
  const lastDimPow2 = roundUpToPow22(lastDim);
  // `indices` is null until the first GPU pass; getInputs feeds x2D twice in
  // that case and the shader's `firstPass` uniform uses identity indices.
  let indices = null;
  const getInputs = () => indices === null ? [x2D, x2D] : [x2D, indices];
  // Runs one compare-and-swap pass, replacing `indices` and disposing the
  // previous intermediate result.
  const runSwap = (dir, inc, shape) => {
    const inputs2 = getInputs();
    const program = new SwapProgram2(shape);
    const firstPass = indices === null ? 1 : 0;
    // Order must match the `uniforms` declaration in SwapProgram2.
    const uniformDataSwap = [
      { type: "int32", data: [lastDim] },
      { type: "int32", data: [firstPass] },
      { type: "float32", data: [Number.NEGATIVE_INFINITY] },
      { type: "int32", data: [dir] },
      { type: "int32", data: [inc] }
    ];
    const prevIndices2 = indices;
    indices = backend3.runWebGPUProgram(program, inputs2, "int32", uniformDataSwap);
    disposeIntermediateTensorInfoOrNull2(backend3, prevIndices2);
  };
  // Step 1: local sort of each kPow2-sized group (bitonic sort network).
  for (let len = 1; len < kPow2; len *= 2) {
    const dir = len * 2;
    for (let inc = len; inc >= 1; inc /= 2) {
      runSwap(dir, inc, [batch, lastDimPow2]);
    }
  }
  // Step 2: repeatedly merge (halving candidates) then re-sort, until only
  // kPow2 candidates per row remain.
  for (let indicesSize = lastDimPow2; indicesSize > kPow2; indicesSize /= 2) {
    const inputs2 = getInputs();
    const mergeProgram = new MergeProgram2([batch, indicesSize / 2]);
    const firstPass = indices === null ? 1 : 0;
    // Order must match the `uniforms` declaration in MergeProgram2.
    const uniformDataMerge = [
      { type: "int32", data: [lastDim] },
      { type: "int32", data: [firstPass] },
      { type: "int32", data: [kPow2] }
    ];
    const prevIndices2 = indices;
    indices = backend3.runWebGPUProgram(mergeProgram, inputs2, "int32", uniformDataMerge);
    disposeIntermediateTensorInfoOrNull2(backend3, prevIndices2);
    const len = kPow2 / 2;
    const dir = len * 2;
    for (let inc = len; inc >= 1; inc /= 2) {
      runSwap(dir, inc, indices.shape);
    }
  }
  // Keep only the first k candidates and gather the matching values.
  let prevIndices = indices;
  indices = slice4({ inputs: { x: indices }, backend: backend3, attrs: { begin: 0, size: [batch, k] } });
  disposeIntermediateTensorInfoOrNull2(backend3, prevIndices);
  let values = gatherV23({ inputs: { x: x2D, indices }, backend: backend3, attrs: { axis: 1, batchDims: 1 } });
  disposeIntermediateTensorInfoOrNull2(backend3, x2D);
  // Restore the original leading dimensions with last dim = k.
  const newShape = xShape.slice(0, -1);
  newShape.push(k);
  prevIndices = indices;
  indices = reshape5({ inputs: { x: indices }, attrs: { shape: newShape }, backend: backend3 });
  disposeIntermediateTensorInfoOrNull2(backend3, prevIndices);
  const prevValues = values;
  values = reshape5({ inputs: { x: values }, attrs: { shape: newShape }, backend: backend3 });
  disposeIntermediateTensorInfoOrNull2(backend3, prevValues);
  return [values, indices];
}
// Kernel registration record binding the TopK kernel name to the webgpu
// implementation above.
var topKConfig3 = {
  kernelName: TopK,
  backendName: "webgpu",
  kernelFunc: topK3
};
var TransformProgram2 = class { var TransformProgram2 = class {
constructor(outShape) { constructor(outShape) {
this.variableNames = ["Image", "Transforms"]; this.variableNames = ["Image", "Transforms"];
@ -65980,6 +66241,7 @@ var kernelConfigs3 = [
sumConfig3, sumConfig3,
tanhConfig3, tanhConfig3,
tileConfig3, tileConfig3,
topKConfig3,
transformConfig3, transformConfig3,
transposeConfig3, transposeConfig3,
unpackConfig3, unpackConfig3,
@ -66842,11 +67104,11 @@ var _WebGPUBackend = class extends KernelBackend {
this.disposed = true; this.disposed = true;
} }
}; };
var WebGPUBackend71 = _WebGPUBackend; var WebGPUBackend72 = _WebGPUBackend;
WebGPUBackend71.nextDataId = 0; WebGPUBackend72.nextDataId = 0;
var webgpu_exports = {}; var webgpu_exports = {};
__export2(webgpu_exports, { __export2(webgpu_exports, {
WebGPUBackend: () => WebGPUBackend71, WebGPUBackend: () => WebGPUBackend72,
webgpu_util: () => webgpu_util_exports webgpu_util: () => webgpu_util_exports
}); });
if (device_util_exports.isBrowser() && isWebGPUSupported()) { if (device_util_exports.isBrowser() && isWebGPUSupported()) {
@ -66864,7 +67126,7 @@ if (device_util_exports.isBrowser() && isWebGPUSupported()) {
console.warn(`This device doesn't support timestamp-query extension. Start Chrome browser with flag --disable-dawn-features=disallow_unsafe_apis then try again. Or zero will shown for the kernel time when profiling mode isenabled. Using performance.now is not workable for webgpu sinceit doesn't support synchronously to read data from GPU.`); console.warn(`This device doesn't support timestamp-query extension. Start Chrome browser with flag --disable-dawn-features=disallow_unsafe_apis then try again. Or zero will shown for the kernel time when profiling mode isenabled. Using performance.now is not workable for webgpu sinceit doesn't support synchronously to read data from GPU.`);
} }
const device = await adapter.requestDevice(deviceDescriptor); const device = await adapter.requestDevice(deviceDescriptor);
return new WebGPUBackend71(device, supportTimeQuery); return new WebGPUBackend72(device, supportTimeQuery);
}, 3); }, 3);
} }
var CppDType; var CppDType;
@ -69450,7 +69712,7 @@ var topk2 = ({ inputs, backend: backend3, attrs }) => {
wasmTopK(xId, xShapeBytes, x.shape.length, CppDType[x.dtype], k, sorted, outValuesId, outIndicesId); wasmTopK(xId, xShapeBytes, x.shape.length, CppDType[x.dtype], k, sorted, outValuesId, outIndicesId);
return [outValues, outIndices]; return [outValues, outIndices];
}; };
var topKConfig3 = { var topKConfig4 = {
kernelName: TopK, kernelName: TopK,
backendName: "wasm", backendName: "wasm",
setupFunc: setup44, setupFunc: setup44,
@ -69658,7 +69920,7 @@ var kernelConfigs4 = [
tanConfig3, tanConfig3,
tanhConfig4, tanhConfig4,
tileConfig4, tileConfig4,
topKConfig3, topKConfig4,
transformConfig4, transformConfig4,
transposeConfig4, transposeConfig4,
unpackConfig4, unpackConfig4,
@ -70026,7 +70288,7 @@ registerBackend("wasm", async () => {
const { wasm } = await init(); const { wasm } = await init();
return new BackendWasm67(wasm); return new BackendWasm67(wasm);
}, WASM_PRIORITY); }, WASM_PRIORITY);
var externalVersion = "3.9.0-20211020"; var externalVersion = "3.9.0-20211021";
var version8 = { var version8 = {
tfjs: externalVersion, tfjs: externalVersion,
"tfjs-core": externalVersion, "tfjs-core": externalVersion,
@ -70037,9 +70299,7 @@ var version8 = {
"tfjs-backend-webgl": externalVersion, "tfjs-backend-webgl": externalVersion,
"tfjs-backend-wasm": externalVersion "tfjs-backend-wasm": externalVersion
}; };
var version_core = version8["tfjs-core"];
// package.json
var version9 = "2.3.5";
// src/image/imagefxshaders.ts // src/image/imagefxshaders.ts
var vertexIdentity = ` var vertexIdentity = `
@ -70182,7 +70442,7 @@ var GLProgram = class {
this.uniform[u] = this.gl.getUniformLocation(this.id, u); this.uniform[u] = this.gl.getUniformLocation(this.id, u);
} }
}; };
function GLImageFilter(params = {}) { function GLImageFilter() {
let drawCount = 0; let drawCount = 0;
let sourceTexture = null; let sourceTexture = null;
let lastInChain = false; let lastInChain = false;
@ -70191,17 +70451,17 @@ function GLImageFilter(params = {}) {
let filterChain = []; let filterChain = [];
let vertexBuffer = null; let vertexBuffer = null;
let currentProgram = null; let currentProgram = null;
const canvas3 = params["canvas"] || typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(100, 100) : document.createElement("canvas"); const fxcanvas = canvas(100, 100);
const shaderProgramCache = {}; const shaderProgramCache = {};
const DRAW = { INTERMEDIATE: 1 }; const DRAW = { INTERMEDIATE: 1 };
const gl = canvas3.getContext("webgl"); const gl = fxcanvas.getContext("webgl");
if (!gl) if (!gl)
throw new Error("filter: cannot get webgl context"); throw new Error("filter: cannot get webgl context");
function resize(width, height) { function resize(width, height) {
if (width === canvas3.width && height === canvas3.height) if (width === fxcanvas.width && height === fxcanvas.height)
return; return;
canvas3.width = width; fxcanvas.width = width;
canvas3.height = height; fxcanvas.height = height;
if (!vertexBuffer) { if (!vertexBuffer) {
const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]); const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]);
vertexBuffer = gl.createBuffer(); vertexBuffer = gl.createBuffer();
@ -70209,7 +70469,7 @@ function GLImageFilter(params = {}) {
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW); gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true); gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
} }
gl.viewport(0, 0, canvas3.width, canvas3.height); gl.viewport(0, 0, fxcanvas.width, fxcanvas.height);
tempFramebuffers = [null, null]; tempFramebuffers = [null, null];
} }
function createFramebufferTexture(width, height) { function createFramebufferTexture(width, height) {
@ -70230,7 +70490,7 @@ function GLImageFilter(params = {}) {
return { fbo, texture }; return { fbo, texture };
} }
function getTempFramebuffer(index) { function getTempFramebuffer(index) {
tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(canvas3.width, canvas3.height); tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(fxcanvas.width, fxcanvas.height);
return tempFramebuffers[index]; return tempFramebuffers[index];
} }
function draw2(flags = 0) { function draw2(flags = 0) {
@ -70592,8 +70852,8 @@ function GLImageFilter(params = {}) {
}, },
convolution: (matrix) => { convolution: (matrix) => {
const m = new Float32Array(matrix); const m = new Float32Array(matrix);
const pixelSizeX = 1 / canvas3.width; const pixelSizeX = 1 / fxcanvas.width;
const pixelSizeY = 1 / canvas3.height; const pixelSizeY = 1 / fxcanvas.height;
const program = compileShader(convolution); const program = compileShader(convolution);
gl.uniform1fv(program == null ? void 0 : program.uniform["m"], m); gl.uniform1fv(program == null ? void 0 : program.uniform["m"], m);
gl.uniform2f(program == null ? void 0 : program.uniform["px"], pixelSizeX, pixelSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["px"], pixelSizeX, pixelSizeY);
@ -70667,8 +70927,8 @@ function GLImageFilter(params = {}) {
]); ]);
}, },
blur: (size2) => { blur: (size2) => {
const blurSizeX = size2 / 7 / canvas3.width; const blurSizeX = size2 / 7 / fxcanvas.width;
const blurSizeY = size2 / 7 / canvas3.height; const blurSizeY = size2 / 7 / fxcanvas.height;
const program = compileShader(blur); const program = compileShader(blur);
gl.uniform2f(program == null ? void 0 : program.uniform["px"], 0, blurSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["px"], 0, blurSizeY);
draw2(DRAW.INTERMEDIATE); draw2(DRAW.INTERMEDIATE);
@ -70676,8 +70936,8 @@ function GLImageFilter(params = {}) {
draw2(); draw2();
}, },
pixelate: (size2) => { pixelate: (size2) => {
const blurSizeX = size2 / canvas3.width; const blurSizeX = size2 / fxcanvas.width;
const blurSizeY = size2 / canvas3.height; const blurSizeY = size2 / fxcanvas.height;
const program = compileShader(pixelate); const program = compileShader(pixelate);
gl.uniform2f(program == null ? void 0 : program.uniform["size"], blurSizeX, blurSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["size"], blurSizeX, blurSizeY);
draw2(); draw2();
@ -70710,7 +70970,7 @@ function GLImageFilter(params = {}) {
const f = filterChain[i]; const f = filterChain[i];
f.func.apply(this, f.args || []); f.func.apply(this, f.args || []);
} }
return canvas3; return fxcanvas;
}; };
this.draw = function(image7) { this.draw = function(image7) {
this.add("brightness", 0); this.add("brightness", 0);
@ -70718,94 +70978,6 @@ function GLImageFilter(params = {}) {
}; };
} }
// src/util/env.ts
// Mutable snapshot of detected runtime capabilities (browser/node, canvas
// availability, per-backend support). Fields start undefined and are filled
// lazily by get3()/backendInfo() below; `initial` marks a not-yet-detected
// state and `backends` lists the tfjs registry entries.
var env2 = {
  browser: void 0,
  node: void 0,
  worker: void 0,
  platform: void 0,
  agent: void 0,
  initial: true,
  backends: [],
  offscreen: void 0,
  filter: void 0,
  tfjs: {
    version: void 0
  },
  wasm: {
    supported: void 0,
    backend: void 0,
    simd: void 0,
    multithread: void 0
  },
  webgl: {
    supported: void 0,
    backend: void 0,
    version: void 0,
    renderer: void 0
  },
  webgpu: {
    supported: void 0,
    backend: void 0,
    adapter: void 0
  },
  kernels: [],
  Canvas: void 0,
  Image: void 0,
  ImageData: void 0
};
// Probes the active tfjs backend and records wasm/webgl/webgpu capabilities
// into the module-level env2 snapshot. Safe to call repeatedly; only the
// currently-active backend gets its deep probes (SIMD flags, GL strings).
async function backendInfo() {
  var _a;
  env2.backends = Object.keys(engine().registryFactory);
  env2.wasm.supported = typeof WebAssembly !== "undefined";
  env2.wasm.backend = env2.backends.includes("wasm");
  if (env2.wasm.supported && env2.wasm.backend && getBackend() === "wasm") {
    env2.wasm.simd = await env().getAsync("WASM_HAS_SIMD_SUPPORT");
    env2.wasm.multithread = await env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
  }
  const c = canvas(100, 100);
  const ctx = c ? c.getContext("webgl2") : void 0;
  // NOTE(review): getContext returns null (not undefined) when webgl2 is
  // unavailable, and typeof null is "object", so this can report supported
  // even without a usable context — confirm upstream intent.
  env2.webgl.supported = typeof ctx !== "undefined";
  env2.webgl.backend = env2.backends.includes("webgl");
  if (env2.webgl.supported && env2.webgl.backend && (getBackend() === "webgl" || getBackend() === "humangl")) {
    // NOTE(review): compares gpgpu to the string "undefined" rather than
    // using typeof — looks like it is always true; verify.
    const gl = backend().gpgpu !== "undefined" ? await backend().getGPGPUContext().gl : null;
    if (gl) {
      env2.webgl.version = gl.getParameter(gl.VERSION);
      env2.webgl.renderer = gl.getParameter(gl.RENDERER);
    }
  }
  env2.webgpu.supported = env2.browser && typeof navigator["gpu"] !== "undefined";
  env2.webgpu.backend = env2.backends.includes("webgpu");
  if (env2.webgpu.supported)
    env2.webgpu.adapter = (_a = await navigator["gpu"].requestAdapter()) == null ? void 0 : _a.name;
  // Lower-cased kernel names registered for the active backend.
  env2.kernels = getKernelsForBackend(getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
}
// Populates the env2 snapshot: host kind (browser/node), platform and agent
// strings parsed from the user agent, offscreen-canvas availability, then
// delegates backend probing to backendInfo().
async function get3() {
  env2.browser = typeof navigator !== "undefined";
  env2.node = typeof process !== "undefined";
  env2.tfjs.version = version;
  // Preserve a user-forced value; otherwise detect OffscreenCanvas support.
  env2.offscreen = typeof env2.offscreen === "undefined" ? typeof OffscreenCanvas !== "undefined" : env2.offscreen;
  if (typeof navigator !== "undefined") {
    // First parenthesized group of the UA string is treated as the platform.
    const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
    if (raw && raw[0]) {
      const platformMatch = raw[0].match(/\(([^()]+)\)/g);
      env2.platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
      env2.agent = navigator.userAgent.replace(raw[0], "");
      // NOTE(review): indexes the second *character* of the platform string;
      // probably intended to test raw[1] (a second UA group) — confirm.
      if (env2.platform[1])
        env2.agent = env2.agent.replace(raw[1], "");
      // NOTE(review): replaces a space with a space — looks like a no-op;
      // possibly meant to collapse doubled spaces.
      env2.agent = env2.agent.replace(/ /g, " ");
    }
  } else if (typeof process !== "undefined") {
    env2.platform = `${process.platform} ${process.arch}`;
    env2.agent = `NodeJS ${process.version}`;
  }
  // Worker detection only meaningful in a browser with offscreen support.
  env2.worker = env2.browser && env2.offscreen ? typeof WorkerGlobalScope !== "undefined" : void 0;
  await backendInfo();
}
// Deep-merges user-provided overrides into the environment snapshot.
// NOTE(review): reassigns the module-level env2 binding rather than mutating
// in place — callers holding the old object reference will not see updates.
async function set(obj) {
  env2 = mergeDeep(env2, obj);
}
// src/image/image.ts // src/image/image.ts
var maxSize = 2048; var maxSize = 2048;
var inCanvas = null; var inCanvas = null;
@ -70907,7 +71079,7 @@ function process2(input2, config3, getTensor2 = true) {
outCanvas = canvas(inCanvas.width, inCanvas.height); outCanvas = canvas(inCanvas.width, inCanvas.height);
if (config3.filter.enabled && env2.webgl.supported) { if (config3.filter.enabled && env2.webgl.supported) {
if (!fx) if (!fx)
fx = env2.browser ? new GLImageFilter({ canvas: outCanvas }) : null; fx = env2.browser ? new GLImageFilter() : null;
env2.filter = !!fx; env2.filter = !!fx;
if (!fx) if (!fx)
return { tensor: null, canvas: inCanvas }; return { tensor: null, canvas: inCanvas };
@ -71039,6 +71211,120 @@ async function skip(config3, input2) {
return skipFrame; return skipFrame;
} }
// src/util/env.ts
// Runtime environment detector for the Human library (this commit refactors
// the former plain env2 object into a class). The constructor performs the
// cheap synchronous detection (host kind, platform/agent from the user
// agent); updateBackend()/updateCPU() fill in the async probes.
// `__publicField` is the esbuild helper for class field declarations.
var Env = class {
  constructor() {
    __publicField(this, "browser");
    __publicField(this, "node");
    __publicField(this, "worker");
    __publicField(this, "platform", "");
    __publicField(this, "agent", "");
    __publicField(this, "backends", []);
    __publicField(this, "initial");
    __publicField(this, "filter");
    __publicField(this, "tfjs");
    __publicField(this, "offscreen");
    __publicField(this, "wasm", {
      supported: void 0,
      backend: void 0,
      simd: void 0,
      multithread: void 0
    });
    __publicField(this, "webgl", {
      supported: void 0,
      backend: void 0,
      version: void 0,
      renderer: void 0
    });
    __publicField(this, "webgpu", {
      supported: void 0,
      backend: void 0,
      adapter: void 0
    });
    __publicField(this, "cpu", {
      model: void 0,
      flags: []
    });
    __publicField(this, "kernels", []);
    __publicField(this, "Canvas");
    __publicField(this, "Image");
    __publicField(this, "ImageData");
    this.browser = typeof navigator !== "undefined";
    this.node = typeof process !== "undefined";
    this.tfjs = { version: version_core };
    this.offscreen = typeof OffscreenCanvas !== "undefined";
    this.initial = true;
    this.worker = this.browser && this.offscreen ? typeof WorkerGlobalScope !== "undefined" : void 0;
    if (typeof navigator !== "undefined") {
      // First parenthesized group of the UA string is treated as the platform.
      const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
      if (raw && raw[0]) {
        const platformMatch = raw[0].match(/\(([^()]+)\)/g);
        this.platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
        this.agent = navigator.userAgent.replace(raw[0], "");
        // NOTE(review): indexes the second *character* of the platform
        // string; probably intended raw[1] (a second UA group) — confirm.
        if (this.platform[1])
          this.agent = this.agent.replace(raw[1], "");
        // NOTE(review): replaces a space with a space — looks like a no-op;
        // possibly meant to collapse doubled spaces.
        this.agent = this.agent.replace(/ /g, " ");
      }
    } else if (typeof process !== "undefined") {
      this.platform = `${process.platform} ${process.arch}`;
      this.agent = `NodeJS ${process.version}`;
    }
  }
  // Probes the active tfjs backend and records wasm/webgl/webgpu details;
  // only the currently-active backend gets its deep probes.
  async updateBackend() {
    var _a;
    this.backends = Object.keys(engine().registryFactory);
    this.wasm.supported = typeof WebAssembly !== "undefined";
    this.wasm.backend = this.backends.includes("wasm");
    if (this.wasm.supported && this.wasm.backend && getBackend() === "wasm") {
      this.wasm.simd = await env().getAsync("WASM_HAS_SIMD_SUPPORT");
      this.wasm.multithread = await env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
    }
    const c = canvas(100, 100);
    const ctx = c ? c.getContext("webgl2") : void 0;
    // NOTE(review): getContext returns null (not undefined) when webgl2 is
    // unavailable, and typeof null is "object", so this can report supported
    // even without a usable context — confirm upstream intent.
    this.webgl.supported = typeof ctx !== "undefined";
    this.webgl.backend = this.backends.includes("webgl");
    if (this.webgl.supported && this.webgl.backend && (getBackend() === "webgl" || getBackend() === "humangl")) {
      // NOTE(review): compares gpgpu to the string "undefined" rather than
      // using typeof — looks like it is always true; verify.
      const gl = backend().gpgpu !== "undefined" ? await backend().getGPGPUContext().gl : null;
      if (gl) {
        this.webgl.version = gl.getParameter(gl.VERSION);
        this.webgl.renderer = gl.getParameter(gl.RENDERER);
      }
    }
    this.webgpu.supported = this.browser && typeof navigator["gpu"] !== "undefined";
    this.webgpu.backend = this.backends.includes("webgpu");
    if (this.webgpu.supported)
      this.webgpu.adapter = (_a = await navigator["gpu"].requestAdapter()) == null ? void 0 : _a.name;
    // Lower-cased kernel names registered for the active backend.
    this.kernels = getKernelsForBackend(getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
  }
  // On Linux/Node, parses /proc/cpuinfo for the CPU model and feature flags;
  // elsewhere leaves an empty record. Read failures are deliberately ignored
  // (best-effort detection).
  async updateCPU() {
    var _a;
    const cpu = { model: "", flags: [] };
    if (this.node && ((_a = this.platform) == null ? void 0 : _a.startsWith("linux"))) {
      // __require is the esbuild CJS-interop helper (node-only path).
      const fs = __require("fs");
      try {
        const data = fs.readFileSync("/proc/cpuinfo").toString();
        for (const line of data.split("\n")) {
          if (line.startsWith("model name")) {
            cpu.model = line.match(/:(.*)/g)[0].replace(":", "").trim();
          }
          if (line.startsWith("flags")) {
            cpu.flags = line.match(/:(.*)/g)[0].replace(":", "").trim().split(" ").sort();
          }
        }
      } catch (e) {
      }
    }
    // NOTE(review): `cpu` is initialized as a class field in the constructor,
    // so the defineProperty branch appears unreachable — confirm.
    if (!this["cpu"])
      Object.defineProperty(this, "cpu", { value: cpu });
    else
      this["cpu"] = cpu;
  }
};
// Singleton environment instance shared across the library (exposed as
// human.env).
var env2 = new Env();
// package.json
// Library version inlined from package.json at bundle time.
var version = "2.3.5";
// src/gear/gear-agegenderrace.ts // src/gear/gear-agegenderrace.ts
var model2; var model2;
var skipped = Number.MAX_SAFE_INTEGER; var skipped = Number.MAX_SAFE_INTEGER;
@ -80594,7 +80880,6 @@ async function check(instance, force = false) {
if (getBackend() === "humangl") { if (getBackend() === "humangl") {
ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false); ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
ENV.set("WEBGL_CPU_FORWARD", true); ENV.set("WEBGL_CPU_FORWARD", true);
ENV.set("WEBGL_PACK_DEPTHWISECONV", false);
ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true); ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true);
ENV.set("CPU_HANDOFF_SIZE_THRESHOLD", 256); ENV.set("CPU_HANDOFF_SIZE_THRESHOLD", 256);
if (typeof instance.config["deallocate"] !== "undefined" && instance.config["deallocate"]) { if (typeof instance.config["deallocate"] !== "undefined" && instance.config["deallocate"]) {
@ -80608,16 +80893,12 @@ async function check(instance, force = false) {
} }
} }
if (getBackend() === "webgpu") { if (getBackend() === "webgpu") {
ENV.set("WEBGPU_CPU_HANDOFF_SIZE_THRESHOLD", 512);
ENV.set("WEBGPU_DEFERRED_SUBMIT_BATCH_SIZE", 0);
ENV.set("WEBGPU_CPU_FORWARD", true);
} }
enableProdMode(); enableProdMode();
await ready(); await ready();
instance.performance.backend = Math.trunc(now() - timeStamp); instance.performance.backend = Math.trunc(now() - timeStamp);
instance.config.backend = getBackend(); instance.config.backend = getBackend();
get3(); env2.updateBackend();
instance.env = env2;
} }
return true; return true;
} }
@ -82496,13 +82777,12 @@ var Human = class {
if (this.events && this.events.dispatchEvent) if (this.events && this.events.dispatchEvent)
(_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event)); (_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event));
}); });
get3();
this.env = env2; this.env = env2;
config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${version}/dist/`; config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${version_core}/dist/`;
config.modelBasePath = this.env.browser ? "../models/" : "file://models/"; config.modelBasePath = env2.browser ? "../models/" : "file://models/";
config.backend = this.env.browser ? "humangl" : "tensorflow"; config.backend = env2.browser ? "humangl" : "tensorflow";
this.version = version9; this.version = version;
Object.defineProperty(this, "version", { value: version9 }); Object.defineProperty(this, "version", { value: version });
this.config = JSON.parse(JSON.stringify(config)); this.config = JSON.parse(JSON.stringify(config));
Object.seal(this.config); Object.seal(this.config);
if (userConfig) if (userConfig)
@ -82556,7 +82836,6 @@ var Human = class {
async init() { async init() {
await check(this, true); await check(this, true);
await this.tf.ready(); await this.tf.ready();
set(this.env);
} }
async load(userConfig) { async load(userConfig) {
this.state = "load"; this.state = "load";
@ -82767,6 +83046,7 @@ _analyzeMemoryLeaks = new WeakMap();
_checkSanity = new WeakMap(); _checkSanity = new WeakMap();
_sanity = new WeakMap(); _sanity = new WeakMap();
export { export {
Env,
Human, Human,
Models, Models,
Human as default, Human as default,

File diff suppressed because one or more lines are too long

View File

@ -10,6 +10,13 @@ var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty; var __hasOwnProp = Object.prototype.hasOwnProperty;
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __markAsModule = (target) => __defProp(target, "__esModule", { value: true }); var __markAsModule = (target) => __defProp(target, "__esModule", { value: true });
// esbuild shim preserving access to CommonJS `require` from ESM output:
// uses the real `require` when present, otherwise wraps the fallback callback
// in a Proxy (so property access like __require("fs") style lookups still
// resolve when possible); the fallback throws for truly dynamic requires.
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
}) : x)(function(x) {
  if (typeof require !== "undefined")
    return require.apply(this, arguments);
  throw new Error('Dynamic require of "' + x + '" is not supported');
});
var __export = (target, all2) => { var __export = (target, all2) => {
__markAsModule(target); __markAsModule(target);
for (var name in all2) for (var name in all2)
@ -242,9 +249,6 @@ var version9 = {
"tfjs-backend-wasm": version8 "tfjs-backend-wasm": version8
}; };
// package.json
var version10 = "2.3.5";
// src/image/imagefxshaders.ts // src/image/imagefxshaders.ts
var vertexIdentity = ` var vertexIdentity = `
precision highp float; precision highp float;
@ -386,7 +390,7 @@ var GLProgram = class {
this.uniform[u] = this.gl.getUniformLocation(this.id, u); this.uniform[u] = this.gl.getUniformLocation(this.id, u);
} }
}; };
function GLImageFilter(params = {}) { function GLImageFilter() {
let drawCount = 0; let drawCount = 0;
let sourceTexture = null; let sourceTexture = null;
let lastInChain = false; let lastInChain = false;
@ -395,17 +399,17 @@ function GLImageFilter(params = {}) {
let filterChain = []; let filterChain = [];
let vertexBuffer = null; let vertexBuffer = null;
let currentProgram = null; let currentProgram = null;
const canvas3 = params["canvas"] || typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(100, 100) : document.createElement("canvas"); const fxcanvas = canvas(100, 100);
const shaderProgramCache = {}; const shaderProgramCache = {};
const DRAW = { INTERMEDIATE: 1 }; const DRAW = { INTERMEDIATE: 1 };
const gl = canvas3.getContext("webgl"); const gl = fxcanvas.getContext("webgl");
if (!gl) if (!gl)
throw new Error("filter: cannot get webgl context"); throw new Error("filter: cannot get webgl context");
function resize(width, height) { function resize(width, height) {
if (width === canvas3.width && height === canvas3.height) if (width === fxcanvas.width && height === fxcanvas.height)
return; return;
canvas3.width = width; fxcanvas.width = width;
canvas3.height = height; fxcanvas.height = height;
if (!vertexBuffer) { if (!vertexBuffer) {
const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]); const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]);
vertexBuffer = gl.createBuffer(); vertexBuffer = gl.createBuffer();
@ -413,7 +417,7 @@ function GLImageFilter(params = {}) {
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW); gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true); gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
} }
gl.viewport(0, 0, canvas3.width, canvas3.height); gl.viewport(0, 0, fxcanvas.width, fxcanvas.height);
tempFramebuffers = [null, null]; tempFramebuffers = [null, null];
} }
function createFramebufferTexture(width, height) { function createFramebufferTexture(width, height) {
@ -434,7 +438,7 @@ function GLImageFilter(params = {}) {
return { fbo, texture }; return { fbo, texture };
} }
function getTempFramebuffer(index) { function getTempFramebuffer(index) {
tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(canvas3.width, canvas3.height); tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(fxcanvas.width, fxcanvas.height);
return tempFramebuffers[index]; return tempFramebuffers[index];
} }
function draw2(flags = 0) { function draw2(flags = 0) {
@ -796,8 +800,8 @@ function GLImageFilter(params = {}) {
}, },
convolution: (matrix) => { convolution: (matrix) => {
const m = new Float32Array(matrix); const m = new Float32Array(matrix);
const pixelSizeX = 1 / canvas3.width; const pixelSizeX = 1 / fxcanvas.width;
const pixelSizeY = 1 / canvas3.height; const pixelSizeY = 1 / fxcanvas.height;
const program = compileShader(convolution); const program = compileShader(convolution);
gl.uniform1fv(program == null ? void 0 : program.uniform["m"], m); gl.uniform1fv(program == null ? void 0 : program.uniform["m"], m);
gl.uniform2f(program == null ? void 0 : program.uniform["px"], pixelSizeX, pixelSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["px"], pixelSizeX, pixelSizeY);
@ -871,8 +875,8 @@ function GLImageFilter(params = {}) {
]); ]);
}, },
blur: (size2) => { blur: (size2) => {
const blurSizeX = size2 / 7 / canvas3.width; const blurSizeX = size2 / 7 / fxcanvas.width;
const blurSizeY = size2 / 7 / canvas3.height; const blurSizeY = size2 / 7 / fxcanvas.height;
const program = compileShader(blur); const program = compileShader(blur);
gl.uniform2f(program == null ? void 0 : program.uniform["px"], 0, blurSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["px"], 0, blurSizeY);
draw2(DRAW.INTERMEDIATE); draw2(DRAW.INTERMEDIATE);
@ -880,8 +884,8 @@ function GLImageFilter(params = {}) {
draw2(); draw2();
}, },
pixelate: (size2) => { pixelate: (size2) => {
const blurSizeX = size2 / canvas3.width; const blurSizeX = size2 / fxcanvas.width;
const blurSizeY = size2 / canvas3.height; const blurSizeY = size2 / fxcanvas.height;
const program = compileShader(pixelate); const program = compileShader(pixelate);
gl.uniform2f(program == null ? void 0 : program.uniform["size"], blurSizeX, blurSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["size"], blurSizeX, blurSizeY);
draw2(); draw2();
@ -914,7 +918,7 @@ function GLImageFilter(params = {}) {
const f = filterChain[i]; const f = filterChain[i];
f.func.apply(this, f.args || []); f.func.apply(this, f.args || []);
} }
return canvas3; return fxcanvas;
}; };
this.draw = function(image25) { this.draw = function(image25) {
this.add("brightness", 0); this.add("brightness", 0);
@ -922,94 +926,6 @@ function GLImageFilter(params = {}) {
}; };
} }
// src/util/env.ts
// Shared mutable environment descriptor; fields start undefined and are
// populated by get()/backendInfo() at runtime.
var env2 = {
  browser: void 0, // true when a `navigator` global exists
  node: void 0, // true when a `process` global exists
  worker: void 0, // true when running inside a web worker (browser+offscreen only)
  platform: void 0, // platform string parsed from the user agent or process info
  agent: void 0, // remaining user-agent string / NodeJS version string
  initial: true, // true until first initialization completes
  backends: [], // tfjs backends registered in the engine
  offscreen: void 0, // OffscreenCanvas availability
  filter: void 0, // whether the GL image filter is active
  tfjs: {
    version: void 0
  },
  wasm: {
    supported: void 0,
    backend: void 0,
    simd: void 0, // WASM_HAS_SIMD_SUPPORT
    multithread: void 0 // WASM_HAS_MULTITHREAD_SUPPORT
  },
  webgl: {
    supported: void 0,
    backend: void 0,
    version: void 0, // gl.VERSION string
    renderer: void 0 // gl.RENDERER string
  },
  webgpu: {
    supported: void 0,
    backend: void 0,
    adapter: void 0 // adapter name from navigator.gpu.requestAdapter()
  },
  kernels: [], // kernel names registered for the active backend (lowercased)
  Canvas: void 0, // optional user-supplied Canvas implementation (NodeJS)
  Image: void 0, // optional user-supplied Image implementation (NodeJS)
  ImageData: void 0 // optional user-supplied ImageData implementation (NodeJS)
};
// Probe the currently-active tfjs backend and record wasm/webgl/webgpu
// capability details onto the shared `env2` descriptor.
// Reads: env2.browser. Writes: env2.backends/wasm/webgl/webgpu/kernels.
async function backendInfo() {
  var _a;
  env2.backends = Object.keys(tfjs_esm_exports.engine().registryFactory);
  env2.wasm.supported = typeof WebAssembly !== "undefined";
  env2.wasm.backend = env2.backends.includes("wasm");
  if (env2.wasm.supported && env2.wasm.backend && tfjs_esm_exports.getBackend() === "wasm") {
    env2.wasm.simd = await tfjs_esm_exports.env().getAsync("WASM_HAS_SIMD_SUPPORT");
    env2.wasm.multithread = await tfjs_esm_exports.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
  }
  const c = canvas(100, 100);
  const ctx = c ? c.getContext("webgl2") : void 0;
  // fix: getContext() returns null (not undefined) when the context type is
  // unavailable, so the previous `typeof ctx !== "undefined"` check reported
  // webgl2 as supported in any browser, even without webgl2
  env2.webgl.supported = ctx != null;
  env2.webgl.backend = env2.backends.includes("webgl");
  if (env2.webgl.supported && env2.webgl.backend && (tfjs_esm_exports.getBackend() === "webgl" || tfjs_esm_exports.getBackend() === "humangl")) {
    // fix: guard with typeof — the value was previously compared against the
    // string "undefined", which is always true for a real gpgpu object
    const gl = typeof tfjs_esm_exports.backend().gpgpu !== "undefined" ? await tfjs_esm_exports.backend().getGPGPUContext().gl : null;
    if (gl) {
      env2.webgl.version = gl.getParameter(gl.VERSION);
      env2.webgl.renderer = gl.getParameter(gl.RENDERER);
    }
  }
  env2.webgpu.supported = env2.browser && typeof navigator["gpu"] !== "undefined";
  env2.webgpu.backend = env2.backends.includes("webgpu");
  if (env2.webgpu.supported)
    env2.webgpu.adapter = (_a = await navigator["gpu"].requestAdapter()) == null ? void 0 : _a.name;
  env2.kernels = tfjs_esm_exports.getKernelsForBackend(tfjs_esm_exports.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
}
// Populate the shared `env2` descriptor with host-environment details
// (browser vs node, platform/agent strings, offscreen-canvas availability,
// worker detection) and then gather backend capability info.
async function get() {
  env2.browser = typeof navigator !== "undefined";
  env2.node = typeof process !== "undefined";
  env2.tfjs.version = tfjs_esm_exports.version_core;
  // only probe OffscreenCanvas if the flag was not set by the user already
  if (typeof env2.offscreen === "undefined")
    env2.offscreen = typeof OffscreenCanvas !== "undefined";
  if (typeof navigator !== "undefined") {
    // pull parenthesized groups out of the user-agent string,
    // e.g. "(Windows NT 10.0; Win64; x64)"
    const uaGroups = navigator.userAgent.match(/\(([^()]+)\)/g);
    if (uaGroups && uaGroups[0]) {
      const parenGroup = uaGroups[0].match(/\(([^()]+)\)/g);
      env2.platform = parenGroup && parenGroup[0] ? parenGroup[0].replace(/\(|\)/g, "") : "";
      env2.agent = navigator.userAgent.replace(uaGroups[0], "");
      // NOTE(review): indexing the platform string with [1] looks like it was
      // meant to be uaGroups[1] — preserved as-is to keep behavior identical
      if (env2.platform[1])
        env2.agent = env2.agent.replace(uaGroups[1], "");
      env2.agent = env2.agent.replace(/ /g, " ");
    }
  } else if (typeof process !== "undefined") {
    env2.platform = `${process.platform} ${process.arch}`;
    env2.agent = `NodeJS ${process.version}`;
  }
  // worker detection only makes sense for browsers with offscreen support
  env2.worker = env2.browser && env2.offscreen ? typeof WorkerGlobalScope !== "undefined" : void 0;
  await backendInfo();
}
// Merge user-supplied overrides (e.g. custom Canvas/Image implementations
// for NodeJS) into the shared environment descriptor.
async function set(obj) {
  env2 = mergeDeep(env2, obj);
}
// src/image/image.ts // src/image/image.ts
var maxSize = 2048; var maxSize = 2048;
var inCanvas = null; var inCanvas = null;
@ -1018,8 +934,8 @@ var tmpCanvas = null;
var fx; var fx;
function canvas(width, height) { function canvas(width, height) {
let c; let c;
if (env2.browser) { if (env.browser) {
if (env2.offscreen) { if (env.offscreen) {
c = new OffscreenCanvas(width, height); c = new OffscreenCanvas(width, height);
} else { } else {
if (typeof document === "undefined") if (typeof document === "undefined")
@ -1029,8 +945,8 @@ function canvas(width, height) {
c.height = height; c.height = height;
} }
} else { } else {
if (typeof env2.Canvas !== "undefined") if (typeof env.Canvas !== "undefined")
c = new env2.Canvas(width, height); c = new env.Canvas(width, height);
else if (typeof globalThis.Canvas !== "undefined") else if (typeof globalThis.Canvas !== "undefined")
c = new globalThis.Canvas(width, height); c = new globalThis.Canvas(width, height);
} }
@ -1048,7 +964,7 @@ function process2(input, config3, getTensor = true) {
log("input is missing"); log("input is missing");
return { tensor: null, canvas: null }; return { tensor: null, canvas: null };
} }
if (!(input instanceof tfjs_esm_exports.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env2.Canvas !== "undefined" && input instanceof env2.Canvas) && !(typeof globalThis.Canvas !== "undefined" && input instanceof globalThis.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) { if (!(input instanceof tfjs_esm_exports.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env.Canvas !== "undefined" && input instanceof env.Canvas) && !(typeof globalThis.Canvas !== "undefined" && input instanceof globalThis.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) {
throw new Error("input type is not recognized"); throw new Error("input type is not recognized");
} }
if (input instanceof tfjs_esm_exports.Tensor) { if (input instanceof tfjs_esm_exports.Tensor) {
@ -1109,10 +1025,10 @@ function process2(input, config3, getTensor = true) {
} }
if (!outCanvas || inCanvas.width !== outCanvas.width || (inCanvas == null ? void 0 : inCanvas.height) !== (outCanvas == null ? void 0 : outCanvas.height)) if (!outCanvas || inCanvas.width !== outCanvas.width || (inCanvas == null ? void 0 : inCanvas.height) !== (outCanvas == null ? void 0 : outCanvas.height))
outCanvas = canvas(inCanvas.width, inCanvas.height); outCanvas = canvas(inCanvas.width, inCanvas.height);
if (config3.filter.enabled && env2.webgl.supported) { if (config3.filter.enabled && env.webgl.supported) {
if (!fx) if (!fx)
fx = env2.browser ? new GLImageFilter({ canvas: outCanvas }) : null; fx = env.browser ? new GLImageFilter() : null;
env2.filter = !!fx; env.filter = !!fx;
if (!fx) if (!fx)
return { tensor: null, canvas: inCanvas }; return { tensor: null, canvas: inCanvas };
fx.reset(); fx.reset();
@ -1152,7 +1068,7 @@ function process2(input, config3, getTensor = true) {
copy(inCanvas, outCanvas); copy(inCanvas, outCanvas);
if (fx) if (fx)
fx = null; fx = null;
env2.filter = !!fx; env.filter = !!fx;
} }
if (!getTensor) if (!getTensor)
return { tensor: null, canvas: outCanvas }; return { tensor: null, canvas: outCanvas };
@ -1161,7 +1077,7 @@ function process2(input, config3, getTensor = true) {
let pixels; let pixels;
let depth = 3; let depth = 3;
if (typeof ImageData !== "undefined" && input instanceof ImageData || input["data"] && input["width"] && input["height"]) { if (typeof ImageData !== "undefined" && input instanceof ImageData || input["data"] && input["width"] && input["height"]) {
if (env2.browser && tfjs_esm_exports.browser) { if (env.browser && tfjs_esm_exports.browser) {
pixels = tfjs_esm_exports.browser ? tfjs_esm_exports.browser.fromPixels(input) : null; pixels = tfjs_esm_exports.browser ? tfjs_esm_exports.browser.fromPixels(input) : null;
} else { } else {
depth = input["data"].length / input["height"] / input["width"]; depth = input["data"].length / input["height"] / input["width"];
@ -1171,7 +1087,7 @@ function process2(input, config3, getTensor = true) {
} else { } else {
if (!tmpCanvas || outCanvas.width !== tmpCanvas.width || (outCanvas == null ? void 0 : outCanvas.height) !== (tmpCanvas == null ? void 0 : tmpCanvas.height)) if (!tmpCanvas || outCanvas.width !== tmpCanvas.width || (outCanvas == null ? void 0 : outCanvas.height) !== (tmpCanvas == null ? void 0 : tmpCanvas.height))
tmpCanvas = canvas(outCanvas.width, outCanvas.height); tmpCanvas = canvas(outCanvas.width, outCanvas.height);
if (tfjs_esm_exports.browser && env2.browser) { if (tfjs_esm_exports.browser && env.browser) {
if (config3.backend === "webgl" || config3.backend === "humangl" || config3.backend === "webgpu") { if (config3.backend === "webgl" || config3.backend === "humangl" || config3.backend === "webgpu") {
pixels = tfjs_esm_exports.browser.fromPixels(outCanvas); pixels = tfjs_esm_exports.browser.fromPixels(outCanvas);
} else { } else {
@ -1243,11 +1159,125 @@ async function skip(config3, input) {
return skipFrame; return skipFrame;
} }
// src/util/env.ts
// Runtime environment detection for Human: browser/node flags, platform and
// user-agent strings, and per-backend (wasm/webgl/webgpu) capability details.
// Static host facts are gathered in the constructor; backend capabilities are
// refreshed via updateBackend(), CPU details via updateCPU().
var Env = class {
  constructor() {
    __publicField(this, "browser"); // true when a `navigator` global exists
    __publicField(this, "node"); // true when a `process` global exists
    __publicField(this, "worker"); // true when running inside a web worker
    __publicField(this, "platform", ""); // platform parsed from UA / process info
    __publicField(this, "agent", ""); // remaining UA string / NodeJS version
    __publicField(this, "backends", []); // tfjs backends registered in the engine
    __publicField(this, "initial"); // true until first initialization completes
    __publicField(this, "filter"); // whether the GL image filter is active
    __publicField(this, "tfjs"); // { version }
    __publicField(this, "offscreen"); // OffscreenCanvas availability
    __publicField(this, "wasm", {
      supported: void 0,
      backend: void 0,
      simd: void 0,
      multithread: void 0
    });
    __publicField(this, "webgl", {
      supported: void 0,
      backend: void 0,
      version: void 0,
      renderer: void 0
    });
    __publicField(this, "webgpu", {
      supported: void 0,
      backend: void 0,
      adapter: void 0
    });
    __publicField(this, "cpu", {
      model: void 0,
      flags: []
    });
    __publicField(this, "kernels", []); // kernel names for the active backend
    __publicField(this, "Canvas"); // optional user-supplied Canvas (NodeJS)
    __publicField(this, "Image"); // optional user-supplied Image (NodeJS)
    __publicField(this, "ImageData"); // optional user-supplied ImageData (NodeJS)
    this.browser = typeof navigator !== "undefined";
    this.node = typeof process !== "undefined";
    this.tfjs = { version: tfjs_esm_exports.version_core };
    this.offscreen = typeof OffscreenCanvas !== "undefined";
    this.initial = true;
    this.worker = this.browser && this.offscreen ? typeof WorkerGlobalScope !== "undefined" : void 0;
    if (typeof navigator !== "undefined") {
      // pull parenthesized groups out of the user-agent string,
      // e.g. "(Windows NT 10.0; Win64; x64)"
      const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
      if (raw && raw[0]) {
        const platformMatch = raw[0].match(/\(([^()]+)\)/g);
        this.platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
        this.agent = navigator.userAgent.replace(raw[0], "");
        // NOTE(review): indexing the platform string with [1] looks like it was
        // meant to be raw[1] — preserved as-is to avoid a behavior change
        if (this.platform[1])
          this.agent = this.agent.replace(raw[1], "");
        this.agent = this.agent.replace(/ /g, " ");
      }
    } else if (typeof process !== "undefined") {
      this.platform = `${process.platform} ${process.arch}`;
      this.agent = `NodeJS ${process.version}`;
    }
  }
  // Probe the currently-active tfjs backend and refresh the wasm/webgl/webgpu
  // capability fields and the registered kernel list.
  async updateBackend() {
    var _a;
    this.backends = Object.keys(tfjs_esm_exports.engine().registryFactory);
    this.wasm.supported = typeof WebAssembly !== "undefined";
    this.wasm.backend = this.backends.includes("wasm");
    if (this.wasm.supported && this.wasm.backend && tfjs_esm_exports.getBackend() === "wasm") {
      this.wasm.simd = await tfjs_esm_exports.env().getAsync("WASM_HAS_SIMD_SUPPORT");
      this.wasm.multithread = await tfjs_esm_exports.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
    }
    const c = canvas(100, 100);
    const ctx = c ? c.getContext("webgl2") : void 0;
    // fix: getContext() returns null (not undefined) when the context type is
    // unavailable, so the previous `typeof ctx !== "undefined"` check reported
    // webgl2 as supported in any browser, even without webgl2
    this.webgl.supported = ctx != null;
    this.webgl.backend = this.backends.includes("webgl");
    if (this.webgl.supported && this.webgl.backend && (tfjs_esm_exports.getBackend() === "webgl" || tfjs_esm_exports.getBackend() === "humangl")) {
      // fix: guard with typeof — the value was previously compared against the
      // string "undefined", which is always true for a real gpgpu object
      const gl = typeof tfjs_esm_exports.backend().gpgpu !== "undefined" ? await tfjs_esm_exports.backend().getGPGPUContext().gl : null;
      if (gl) {
        this.webgl.version = gl.getParameter(gl.VERSION);
        this.webgl.renderer = gl.getParameter(gl.RENDERER);
      }
    }
    this.webgpu.supported = this.browser && typeof navigator["gpu"] !== "undefined";
    this.webgpu.backend = this.backends.includes("webgpu");
    if (this.webgpu.supported)
      this.webgpu.adapter = (_a = await navigator["gpu"].requestAdapter()) == null ? void 0 : _a.name;
    this.kernels = tfjs_esm_exports.getKernelsForBackend(tfjs_esm_exports.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
  }
  // Best-effort CPU introspection: reads model name and feature flags from
  // /proc/cpuinfo (NodeJS on linux only); elsewhere values stay empty.
  async updateCPU() {
    var _a;
    const cpu = { model: "", flags: [] };
    if (this.node && ((_a = this.platform) == null ? void 0 : _a.startsWith("linux"))) {
      const fs = __require("fs");
      try {
        const data = fs.readFileSync("/proc/cpuinfo").toString();
        for (const line of data.split("\n")) {
          if (line.startsWith("model name")) {
            cpu.model = line.match(/:(.*)/g)[0].replace(":", "").trim();
          }
          if (line.startsWith("flags")) {
            cpu.flags = line.match(/:(.*)/g)[0].replace(":", "").trim().split(" ").sort();
          }
        }
      } catch (e) {
        // deliberate best-effort: an unreadable /proc/cpuinfo is ignored
      }
    }
    if (!this["cpu"])
      Object.defineProperty(this, "cpu", { value: cpu });
    else
      this["cpu"] = cpu;
  }
};
// shared singleton environment instance used throughout this bundle
var env = new Env();
// package.json
var version10 = "2.3.5";
// src/gear/gear-agegenderrace.ts // src/gear/gear-agegenderrace.ts
var model; var model;
var skipped = Number.MAX_SAFE_INTEGER; var skipped = Number.MAX_SAFE_INTEGER;
async function load(config3) { async function load(config3) {
if (env2.initial) if (env.initial)
model = null; model = null;
if (!model) { if (!model) {
model = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, config3.face.agegenderrace.modelPath)); model = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, config3.face.agegenderrace.modelPath));
@ -1267,7 +1297,7 @@ var skipped2 = Number.MAX_SAFE_INTEGER;
var lastCount = 0; var lastCount = 0;
async function load2(config3) { async function load2(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model2 = null; model2 = null;
if (!model2) { if (!model2) {
model2 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.antispoof) == null ? void 0 : _a.modelPath) || "")); model2 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.antispoof) == null ? void 0 : _a.modelPath) || ""));
@ -4726,7 +4756,7 @@ var inputSize = 0;
var size = () => inputSize; var size = () => inputSize;
async function load3(config3) { async function load3(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model3 = null; model3 = null;
if (!model3) { if (!model3) {
model3 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.detector) == null ? void 0 : _a.modelPath) || "")); model3 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.detector) == null ? void 0 : _a.modelPath) || ""));
@ -5086,7 +5116,7 @@ var inputSize3 = 0;
var last = []; var last = [];
var skipped4 = Number.MAX_SAFE_INTEGER; var skipped4 = Number.MAX_SAFE_INTEGER;
async function load4(config3) { async function load4(config3) {
if (env2.initial) if (env.initial)
model4 = null; model4 = null;
if (!model4) { if (!model4) {
fakeOps(["floormod"], config3); fakeOps(["floormod"], config3);
@ -5153,7 +5183,7 @@ async function predict3(input, config3) {
return last; return last;
} }
skipped4 = 0; skipped4 = 0;
if (!env2.kernels.includes("mod") || !env2.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last; return last;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [input.shape[2], input.shape[1]]; const outputSize2 = [input.shape[2], input.shape[1]];
@ -5204,7 +5234,7 @@ var model5;
var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} }; var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} };
var skipped5 = Number.MAX_SAFE_INTEGER; var skipped5 = Number.MAX_SAFE_INTEGER;
async function load5(config3) { async function load5(config3) {
if (env2.initial) if (env.initial)
model5 = null; model5 = null;
if (!model5) { if (!model5) {
model5 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || "")); model5 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || ""));
@ -5317,7 +5347,7 @@ var skipped6 = Number.MAX_SAFE_INTEGER;
var rgb = [0.2989, 0.587, 0.114]; var rgb = [0.2989, 0.587, 0.114];
async function load6(config3) { async function load6(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model6 = null; model6 = null;
if (!model6) { if (!model6) {
model6 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.emotion) == null ? void 0 : _a.modelPath) || "")); model6 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.emotion) == null ? void 0 : _a.modelPath) || ""));
@ -5391,7 +5421,7 @@ var irisLandmarks = {
}; };
async function load7(config3) { async function load7(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model7 = null; model7 = null;
if (!model7) { if (!model7) {
model7 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.iris) == null ? void 0 : _a.modelPath) || "")); model7 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.iris) == null ? void 0 : _a.modelPath) || ""));
@ -5436,7 +5466,7 @@ var getEyeBox = (rawCoords, face5, eyeInnerCornerIndex, eyeOuterCornerIndex, fli
box4.endPoint[1] / meshSize, box4.endPoint[1] / meshSize,
box4.endPoint[0] / meshSize box4.endPoint[0] / meshSize
]], [0], [inputSize4, inputSize4]); ]], [0], [inputSize4, inputSize4]);
if (flip && env2.kernels.includes("flipleftright")) { if (flip && env.kernels.includes("flipleftright")) {
const flipped = tfjs_esm_exports.image.flipLeftRight(crop2); const flipped = tfjs_esm_exports.image.flipLeftRight(crop2);
tfjs_esm_exports.dispose(crop2); tfjs_esm_exports.dispose(crop2);
crop2 = flipped; crop2 = flipped;
@ -5550,7 +5580,7 @@ async function predict6(input, config3) {
faceScore: 0, faceScore: 0,
annotations: {} annotations: {}
}; };
if (((_d = config3.face.detector) == null ? void 0 : _d.rotation) && ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) && env2.kernels.includes("rotatewithoffset")) { if (((_d = config3.face.detector) == null ? void 0 : _d.rotation) && ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) && env.kernels.includes("rotatewithoffset")) {
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} else { } else {
rotationMatrix = IDENTITY_MATRIX; rotationMatrix = IDENTITY_MATRIX;
@ -5592,7 +5622,7 @@ async function predict6(input, config3) {
box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence }; box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence };
for (const key of Object.keys(meshAnnotations)) for (const key of Object.keys(meshAnnotations))
face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]); face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]);
if (((_j = config3.face.detector) == null ? void 0 : _j.rotation) && config3.face.mesh.enabled && ((_k = config3.face.description) == null ? void 0 : _k.enabled) && env2.kernels.includes("rotatewithoffset")) { if (((_j = config3.face.detector) == null ? void 0 : _j.rotation) && config3.face.mesh.enabled && ((_k = config3.face.description) == null ? void 0 : _k.enabled) && env.kernels.includes("rotatewithoffset")) {
tfjs_esm_exports.dispose(face5.tensor); tfjs_esm_exports.dispose(face5.tensor);
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} }
@ -5616,7 +5646,7 @@ async function predict6(input, config3) {
} }
async function load8(config3) { async function load8(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model8 = null; model8 = null;
if (!model8) { if (!model8) {
model8 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.mesh) == null ? void 0 : _a.modelPath) || "")); model8 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.mesh) == null ? void 0 : _a.modelPath) || ""));
@ -5642,7 +5672,7 @@ var skipped8 = Number.MAX_SAFE_INTEGER;
async function load9(config3) { async function load9(config3) {
var _a, _b; var _a, _b;
const modelUrl = join(config3.modelBasePath, ((_a = config3.face.description) == null ? void 0 : _a.modelPath) || ""); const modelUrl = join(config3.modelBasePath, ((_a = config3.face.description) == null ? void 0 : _a.modelPath) || "");
if (env2.initial) if (env.initial)
model9 = null; model9 = null;
if (!model9) { if (!model9) {
model9 = await tfjs_esm_exports.loadGraphModel(modelUrl); model9 = await tfjs_esm_exports.loadGraphModel(modelUrl);
@ -8939,7 +8969,7 @@ var HandPipeline = class {
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0; const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
const palmCenter = getBoxCenter2(currentBox); const palmCenter = getBoxCenter2(currentBox);
const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]]; const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]];
const rotatedImage = config3.hand.rotation && env2.kernels.includes("rotatewithoffset") ? tfjs_esm_exports.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone(); const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tfjs_esm_exports.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone();
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter); const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox; const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]); const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@ -9441,7 +9471,7 @@ async function predict8(input, config3) {
} }
async function load10(config3) { async function load10(config3) {
var _a, _b, _c, _d, _e, _f; var _a, _b, _c, _d, _e, _f;
if (env2.initial) { if (env.initial) {
handDetectorModel = null; handDetectorModel = null;
handPoseModel = null; handPoseModel = null;
} }
@ -9530,7 +9560,7 @@ var fingerMap = {
}; };
async function loadDetect2(config3) { async function loadDetect2(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
models2[0] = null; models2[0] = null;
if (!models2[0]) { if (!models2[0]) {
fakeOps(["tensorlistreserve", "enter", "tensorlistfromtensor", "merge", "loopcond", "switch", "exit", "tensorliststack", "nextiteration", "tensorlistsetitem", "tensorlistgetitem", "reciprocal", "shape", "split", "where"], config3); fakeOps(["tensorlistreserve", "enter", "tensorlistfromtensor", "merge", "loopcond", "switch", "exit", "tensorliststack", "nextiteration", "tensorlistsetitem", "tensorlistgetitem", "reciprocal", "shape", "split", "where"], config3);
@ -9548,7 +9578,7 @@ async function loadDetect2(config3) {
} }
async function loadSkeleton(config3) { async function loadSkeleton(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
models2[1] = null; models2[1] = null;
if (!models2[1]) { if (!models2[1]) {
models2[1] = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, ((_a = config3.hand.skeleton) == null ? void 0 : _a.modelPath) || "")); models2[1] = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, ((_a = config3.hand.skeleton) == null ? void 0 : _a.modelPath) || ""));
@ -9853,7 +9883,7 @@ var cache5 = {
bodies: [] bodies: []
}; };
async function load11(config3) { async function load11(config3) {
if (env2.initial) if (env.initial)
model10 = null; model10 = null;
if (!model10) { if (!model10) {
fakeOps(["size"], config3); fakeOps(["size"], config3);
@ -9985,7 +10015,7 @@ var last4 = [];
var skipped11 = Number.MAX_SAFE_INTEGER; var skipped11 = Number.MAX_SAFE_INTEGER;
var scaleBox = 2.5; var scaleBox = 2.5;
async function load12(config3) { async function load12(config3) {
if (!model11 || env2.initial) { if (!model11 || env.initial) {
model11 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, config3.object.modelPath || "")); model11 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, config3.object.modelPath || ""));
const inputs = Object.values(model11.modelSignature["inputs"]); const inputs = Object.values(model11.modelSignature["inputs"]);
model11.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null; model11.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null;
@ -10066,7 +10096,7 @@ async function predict11(image25, config3) {
return last4; return last4;
} }
skipped11 = 0; skipped11 = 0;
if (!env2.kernels.includes("mod") || !env2.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last4; return last4;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [image25.shape[2], image25.shape[1]]; const outputSize2 = [image25.shape[2], image25.shape[1]];
@ -10414,7 +10444,7 @@ async function predict12(input, config3) {
return scaled; return scaled;
} }
async function load13(config3) { async function load13(config3) {
if (!model12 || env2.initial) { if (!model12 || env.initial) {
model12 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || "")); model12 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || ""));
if (!model12 || !model12["modelUrl"]) if (!model12 || !model12["modelUrl"])
log("load model failed:", config3.body.modelPath); log("load model failed:", config3.body.modelPath);
@ -10429,7 +10459,7 @@ async function load13(config3) {
var model13; var model13;
var busy = false; var busy = false;
async function load14(config3) { async function load14(config3) {
if (!model13 || env2.initial) { if (!model13 || env.initial) {
model13 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, config3.segmentation.modelPath || "")); model13 = await tfjs_esm_exports.loadGraphModel(join(config3.modelBasePath, config3.segmentation.modelPath || ""));
if (!model13 || !model13["modelUrl"]) if (!model13 || !model13["modelUrl"])
log("load model failed:", config3.segmentation.modelPath); log("load model failed:", config3.segmentation.modelPath);
@ -10468,7 +10498,7 @@ async function process5(input, background, config3) {
t.data = tfjs_esm_exports.image.resizeBilinear(t.squeeze, [height, width]); t.data = tfjs_esm_exports.image.resizeBilinear(t.squeeze, [height, width]);
} }
const data = Array.from(await t.data.data()); const data = Array.from(await t.data.data());
if (env2.node && !env2.Canvas && typeof ImageData === "undefined") { if (env.node && !env.Canvas && typeof ImageData === "undefined") {
if (config3.debug) if (config3.debug)
log("canvas support missing"); log("canvas support missing");
Object.keys(t).forEach((tensor3) => tfjs_esm_exports.dispose(t[tensor3])); Object.keys(t).forEach((tensor3) => tfjs_esm_exports.dispose(t[tensor3]));
@ -10540,7 +10570,7 @@ function reset(instance) {
} }
async function load15(instance) { async function load15(instance) {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E;
if (env2.initial) if (env.initial)
reset(instance); reset(instance);
if (instance.config.hand.enabled) { if (instance.config.hand.enabled) {
if (!instance.models.handpose && ((_b = (_a = instance.config.hand.detector) == null ? void 0 : _a.modelPath) == null ? void 0 : _b.includes("handdetect"))) if (!instance.models.handpose && ((_b = (_a = instance.config.hand.detector) == null ? void 0 : _a.modelPath) == null ? void 0 : _b.includes("handdetect")))
@ -10733,24 +10763,24 @@ async function register(instance) {
// src/tfjs/backend.ts // src/tfjs/backend.ts
async function check(instance, force = false) { async function check(instance, force = false) {
instance.state = "backend"; instance.state = "backend";
if (force || env2.initial || instance.config.backend && instance.config.backend.length > 0 && tfjs_esm_exports.getBackend() !== instance.config.backend) { if (force || env.initial || instance.config.backend && instance.config.backend.length > 0 && tfjs_esm_exports.getBackend() !== instance.config.backend) {
const timeStamp = now(); const timeStamp = now();
if (instance.config.backend && instance.config.backend.length > 0) { if (instance.config.backend && instance.config.backend.length > 0) {
if (typeof window === "undefined" && typeof WorkerGlobalScope !== "undefined" && instance.config.debug) { if (typeof window === "undefined" && typeof WorkerGlobalScope !== "undefined" && instance.config.debug) {
if (instance.config.debug) if (instance.config.debug)
log("running inside web worker"); log("running inside web worker");
} }
if (env2.browser && instance.config.backend === "tensorflow") { if (env.browser && instance.config.backend === "tensorflow") {
if (instance.config.debug) if (instance.config.debug)
log("override: backend set to tensorflow while running in browser"); log("override: backend set to tensorflow while running in browser");
instance.config.backend = "humangl"; instance.config.backend = "humangl";
} }
if (env2.node && (instance.config.backend === "webgl" || instance.config.backend === "humangl")) { if (env.node && (instance.config.backend === "webgl" || instance.config.backend === "humangl")) {
if (instance.config.debug) if (instance.config.debug)
log(`override: backend set to ${instance.config.backend} while running in nodejs`); log(`override: backend set to ${instance.config.backend} while running in nodejs`);
instance.config.backend = "tensorflow"; instance.config.backend = "tensorflow";
} }
if (env2.browser && instance.config.backend === "webgpu") { if (env.browser && instance.config.backend === "webgpu") {
if (typeof navigator === "undefined" || typeof navigator["gpu"] === "undefined") { if (typeof navigator === "undefined" || typeof navigator["gpu"] === "undefined") {
log("override: backend set to webgpu but browser does not support webgpu"); log("override: backend set to webgpu but browser does not support webgpu");
instance.config.backend = "humangl"; instance.config.backend = "humangl";
@ -10767,7 +10797,7 @@ async function check(instance, force = false) {
log("available backends:", available); log("available backends:", available);
if (!available.includes(instance.config.backend)) { if (!available.includes(instance.config.backend)) {
log(`error: backend ${instance.config.backend} not found in registry`); log(`error: backend ${instance.config.backend} not found in registry`);
instance.config.backend = env2.node ? "tensorflow" : "webgl"; instance.config.backend = env.node ? "tensorflow" : "webgl";
if (instance.config.debug) if (instance.config.debug)
log(`override: setting backend ${instance.config.backend}`); log(`override: setting backend ${instance.config.backend}`);
} }
@ -10798,7 +10828,6 @@ async function check(instance, force = false) {
if (tfjs_esm_exports.getBackend() === "humangl") { if (tfjs_esm_exports.getBackend() === "humangl") {
tfjs_esm_exports.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false); tfjs_esm_exports.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
tfjs_esm_exports.ENV.set("WEBGL_CPU_FORWARD", true); tfjs_esm_exports.ENV.set("WEBGL_CPU_FORWARD", true);
tfjs_esm_exports.ENV.set("WEBGL_PACK_DEPTHWISECONV", false);
tfjs_esm_exports.ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true); tfjs_esm_exports.ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true);
tfjs_esm_exports.ENV.set("CPU_HANDOFF_SIZE_THRESHOLD", 256); tfjs_esm_exports.ENV.set("CPU_HANDOFF_SIZE_THRESHOLD", 256);
if (typeof instance.config["deallocate"] !== "undefined" && instance.config["deallocate"]) { if (typeof instance.config["deallocate"] !== "undefined" && instance.config["deallocate"]) {
@ -10812,16 +10841,12 @@ async function check(instance, force = false) {
} }
} }
if (tfjs_esm_exports.getBackend() === "webgpu") { if (tfjs_esm_exports.getBackend() === "webgpu") {
tfjs_esm_exports.ENV.set("WEBGPU_CPU_HANDOFF_SIZE_THRESHOLD", 512);
tfjs_esm_exports.ENV.set("WEBGPU_DEFERRED_SUBMIT_BATCH_SIZE", 0);
tfjs_esm_exports.ENV.set("WEBGPU_CPU_FORWARD", true);
} }
tfjs_esm_exports.enableProdMode(); tfjs_esm_exports.enableProdMode();
await tfjs_esm_exports.ready(); await tfjs_esm_exports.ready();
instance.performance.backend = Math.trunc(now() - timeStamp); instance.performance.backend = Math.trunc(now() - timeStamp);
instance.config.backend = tfjs_esm_exports.getBackend(); instance.config.backend = tfjs_esm_exports.getBackend();
get(); env.updateBackend();
instance.env = env2;
} }
return true; return true;
} }
@ -10837,7 +10862,7 @@ function fakeOps(kernelNames, config3) {
}; };
tfjs_esm_exports.registerKernel(kernelConfig); tfjs_esm_exports.registerKernel(kernelConfig);
} }
env2.kernels = tfjs_esm_exports.getKernelsForBackend(tfjs_esm_exports.getBackend()).map((kernel) => kernel.kernelName.toLowerCase()); env.kernels = tfjs_esm_exports.getKernelsForBackend(tfjs_esm_exports.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
} }
// src/util/draw.ts // src/util/draw.ts
@ -12580,8 +12605,8 @@ async function warmupCanvas(instance) {
let img; let img;
if (typeof Image !== "undefined") if (typeof Image !== "undefined")
img = new Image(); img = new Image();
else if (env2.Image) else if (env.Image)
img = new env2.Image(); img = new env.Image();
img.onload = async () => { img.onload = async () => {
const canvas3 = canvas(img.naturalWidth, img.naturalHeight); const canvas3 = canvas(img.naturalWidth, img.naturalHeight);
if (!canvas3) { if (!canvas3) {
@ -12635,7 +12660,7 @@ async function warmup(instance, userConfig) {
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
if (typeof createImageBitmap === "function") if (typeof createImageBitmap === "function")
res = await warmupBitmap(instance); res = await warmupBitmap(instance);
else if (typeof Image !== "undefined" || env2.Canvas !== void 0) else if (typeof Image !== "undefined" || env.Canvas !== void 0)
res = await warmupCanvas(instance); res = await warmupCanvas(instance);
else else
res = await warmupNode(instance); res = await warmupNode(instance);
@ -12700,11 +12725,10 @@ var Human = class {
if (this.events && this.events.dispatchEvent) if (this.events && this.events.dispatchEvent)
(_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event)); (_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event));
}); });
get(); this.env = env;
this.env = env2;
config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tfjs_esm_exports.version_core}/dist/`; config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tfjs_esm_exports.version_core}/dist/`;
config.modelBasePath = this.env.browser ? "../models/" : "file://models/"; config.modelBasePath = env.browser ? "../models/" : "file://models/";
config.backend = this.env.browser ? "humangl" : "tensorflow"; config.backend = env.browser ? "humangl" : "tensorflow";
this.version = version10; this.version = version10;
Object.defineProperty(this, "version", { value: version10 }); Object.defineProperty(this, "version", { value: version10 });
this.config = JSON.parse(JSON.stringify(config)); this.config = JSON.parse(JSON.stringify(config));
@ -12760,7 +12784,6 @@ var Human = class {
async init() { async init() {
await check(this, true); await check(this, true);
await this.tf.ready(); await this.tf.ready();
set(this.env);
} }
async load(userConfig) { async load(userConfig) {
this.state = "load"; this.state = "load";
@ -12768,7 +12791,7 @@ var Human = class {
const count2 = Object.values(this.models).filter((model14) => model14).length; const count2 = Object.values(this.models).filter((model14) => model14).length;
if (userConfig) if (userConfig)
this.config = mergeDeep(this.config, userConfig); this.config = mergeDeep(this.config, userConfig);
if (env2.initial) { if (env.initial) {
if (this.config.debug) if (this.config.debug)
log(`version: ${this.version}`); log(`version: ${this.version}`);
if (this.config.debug) if (this.config.debug)
@ -12784,9 +12807,9 @@ var Human = class {
} }
} }
await load15(this); await load15(this);
if (env2.initial && this.config.debug) if (env.initial && this.config.debug)
log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors"); log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors");
env2.initial = false; env.initial = false;
const loaded = Object.values(this.models).filter((model14) => model14).length; const loaded = Object.values(this.models).filter((model14) => model14).length;
if (loaded !== count2) { if (loaded !== count2) {
await validate2(this); await validate2(this);
@ -12971,10 +12994,11 @@ _analyzeMemoryLeaks = new WeakMap();
_checkSanity = new WeakMap(); _checkSanity = new WeakMap();
_sanity = new WeakMap(); _sanity = new WeakMap();
export { export {
Env,
Human, Human,
Models, Models,
Human as default, Human as default,
config as defaults, config as defaults,
env2 as env env
}; };
//# sourceMappingURL=human.esm-nobundle.js.map //# sourceMappingURL=human.esm-nobundle.js.map

File diff suppressed because one or more lines are too long

251
dist/human.esm.js vendored
View File

@ -67731,9 +67731,6 @@ var version92 = {
"tfjs-backend-wasm": version82 "tfjs-backend-wasm": version82
}; };
// package.json
var version6 = "2.3.5";
// src/image/imagefxshaders.ts // src/image/imagefxshaders.ts
var vertexIdentity = ` var vertexIdentity = `
precision highp float; precision highp float;
@ -67875,7 +67872,7 @@ var GLProgram = class {
this.uniform[u] = this.gl.getUniformLocation(this.id, u); this.uniform[u] = this.gl.getUniformLocation(this.id, u);
} }
}; };
function GLImageFilter(params = {}) { function GLImageFilter() {
let drawCount = 0; let drawCount = 0;
let sourceTexture = null; let sourceTexture = null;
let lastInChain = false; let lastInChain = false;
@ -67884,17 +67881,17 @@ function GLImageFilter(params = {}) {
let filterChain = []; let filterChain = [];
let vertexBuffer = null; let vertexBuffer = null;
let currentProgram = null; let currentProgram = null;
const canvas3 = params["canvas"] || typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(100, 100) : document.createElement("canvas"); const fxcanvas = canvas(100, 100);
const shaderProgramCache = {}; const shaderProgramCache = {};
const DRAW = { INTERMEDIATE: 1 }; const DRAW = { INTERMEDIATE: 1 };
const gl = canvas3.getContext("webgl"); const gl = fxcanvas.getContext("webgl");
if (!gl) if (!gl)
throw new Error("filter: cannot get webgl context"); throw new Error("filter: cannot get webgl context");
function resize(width, height) { function resize(width, height) {
if (width === canvas3.width && height === canvas3.height) if (width === fxcanvas.width && height === fxcanvas.height)
return; return;
canvas3.width = width; fxcanvas.width = width;
canvas3.height = height; fxcanvas.height = height;
if (!vertexBuffer) { if (!vertexBuffer) {
const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]); const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]);
vertexBuffer = gl.createBuffer(); vertexBuffer = gl.createBuffer();
@ -67902,7 +67899,7 @@ function GLImageFilter(params = {}) {
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW); gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true); gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
} }
gl.viewport(0, 0, canvas3.width, canvas3.height); gl.viewport(0, 0, fxcanvas.width, fxcanvas.height);
tempFramebuffers = [null, null]; tempFramebuffers = [null, null];
} }
function createFramebufferTexture(width, height) { function createFramebufferTexture(width, height) {
@ -67923,7 +67920,7 @@ function GLImageFilter(params = {}) {
return { fbo, texture }; return { fbo, texture };
} }
function getTempFramebuffer(index) { function getTempFramebuffer(index) {
tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(canvas3.width, canvas3.height); tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(fxcanvas.width, fxcanvas.height);
return tempFramebuffers[index]; return tempFramebuffers[index];
} }
function draw2(flags = 0) { function draw2(flags = 0) {
@ -68285,8 +68282,8 @@ function GLImageFilter(params = {}) {
}, },
convolution: (matrix) => { convolution: (matrix) => {
const m = new Float32Array(matrix); const m = new Float32Array(matrix);
const pixelSizeX = 1 / canvas3.width; const pixelSizeX = 1 / fxcanvas.width;
const pixelSizeY = 1 / canvas3.height; const pixelSizeY = 1 / fxcanvas.height;
const program = compileShader(convolution); const program = compileShader(convolution);
gl.uniform1fv(program == null ? void 0 : program.uniform["m"], m); gl.uniform1fv(program == null ? void 0 : program.uniform["m"], m);
gl.uniform2f(program == null ? void 0 : program.uniform["px"], pixelSizeX, pixelSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["px"], pixelSizeX, pixelSizeY);
@ -68360,8 +68357,8 @@ function GLImageFilter(params = {}) {
]); ]);
}, },
blur: (size2) => { blur: (size2) => {
const blurSizeX = size2 / 7 / canvas3.width; const blurSizeX = size2 / 7 / fxcanvas.width;
const blurSizeY = size2 / 7 / canvas3.height; const blurSizeY = size2 / 7 / fxcanvas.height;
const program = compileShader(blur); const program = compileShader(blur);
gl.uniform2f(program == null ? void 0 : program.uniform["px"], 0, blurSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["px"], 0, blurSizeY);
draw2(DRAW.INTERMEDIATE); draw2(DRAW.INTERMEDIATE);
@ -68369,8 +68366,8 @@ function GLImageFilter(params = {}) {
draw2(); draw2();
}, },
pixelate: (size2) => { pixelate: (size2) => {
const blurSizeX = size2 / canvas3.width; const blurSizeX = size2 / fxcanvas.width;
const blurSizeY = size2 / canvas3.height; const blurSizeY = size2 / fxcanvas.height;
const program = compileShader(pixelate); const program = compileShader(pixelate);
gl.uniform2f(program == null ? void 0 : program.uniform["size"], blurSizeX, blurSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["size"], blurSizeX, blurSizeY);
draw2(); draw2();
@ -68403,7 +68400,7 @@ function GLImageFilter(params = {}) {
const f = filterChain[i]; const f = filterChain[i];
f.func.apply(this, f.args || []); f.func.apply(this, f.args || []);
} }
return canvas3; return fxcanvas;
}; };
this.draw = function(image7) { this.draw = function(image7) {
this.add("brightness", 0); this.add("brightness", 0);
@ -68411,94 +68408,6 @@ function GLImageFilter(params = {}) {
}; };
} }
// src/util/env.ts
var env2 = {
browser: void 0,
node: void 0,
worker: void 0,
platform: void 0,
agent: void 0,
initial: true,
backends: [],
offscreen: void 0,
filter: void 0,
tfjs: {
version: void 0
},
wasm: {
supported: void 0,
backend: void 0,
simd: void 0,
multithread: void 0
},
webgl: {
supported: void 0,
backend: void 0,
version: void 0,
renderer: void 0
},
webgpu: {
supported: void 0,
backend: void 0,
adapter: void 0
},
kernels: [],
Canvas: void 0,
Image: void 0,
ImageData: void 0
};
async function backendInfo() {
var _a;
env2.backends = Object.keys(engine().registryFactory);
env2.wasm.supported = typeof WebAssembly !== "undefined";
env2.wasm.backend = env2.backends.includes("wasm");
if (env2.wasm.supported && env2.wasm.backend && getBackend() === "wasm") {
env2.wasm.simd = await env().getAsync("WASM_HAS_SIMD_SUPPORT");
env2.wasm.multithread = await env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
}
const c = canvas(100, 100);
const ctx = c ? c.getContext("webgl2") : void 0;
env2.webgl.supported = typeof ctx !== "undefined";
env2.webgl.backend = env2.backends.includes("webgl");
if (env2.webgl.supported && env2.webgl.backend && (getBackend() === "webgl" || getBackend() === "humangl")) {
const gl = backend().gpgpu !== "undefined" ? await backend().getGPGPUContext().gl : null;
if (gl) {
env2.webgl.version = gl.getParameter(gl.VERSION);
env2.webgl.renderer = gl.getParameter(gl.RENDERER);
}
}
env2.webgpu.supported = env2.browser && typeof navigator["gpu"] !== "undefined";
env2.webgpu.backend = env2.backends.includes("webgpu");
if (env2.webgpu.supported)
env2.webgpu.adapter = (_a = await navigator["gpu"].requestAdapter()) == null ? void 0 : _a.name;
env2.kernels = getKernelsForBackend(getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
}
async function get3() {
env2.browser = typeof navigator !== "undefined";
env2.node = typeof process !== "undefined";
env2.tfjs.version = version;
env2.offscreen = typeof env2.offscreen === "undefined" ? typeof OffscreenCanvas !== "undefined" : env2.offscreen;
if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) {
const platformMatch = raw[0].match(/\(([^()]+)\)/g);
env2.platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
env2.agent = navigator.userAgent.replace(raw[0], "");
if (env2.platform[1])
env2.agent = env2.agent.replace(raw[1], "");
env2.agent = env2.agent.replace(/ /g, " ");
}
} else if (typeof process !== "undefined") {
env2.platform = `${process.platform} ${process.arch}`;
env2.agent = `NodeJS ${process.version}`;
}
env2.worker = env2.browser && env2.offscreen ? typeof WorkerGlobalScope !== "undefined" : void 0;
await backendInfo();
}
async function set(obj) {
env2 = mergeDeep(env2, obj);
}
// src/image/image.ts // src/image/image.ts
var maxSize = 2048; var maxSize = 2048;
var inCanvas = null; var inCanvas = null;
@ -68600,7 +68509,7 @@ function process2(input2, config3, getTensor2 = true) {
outCanvas = canvas(inCanvas.width, inCanvas.height); outCanvas = canvas(inCanvas.width, inCanvas.height);
if (config3.filter.enabled && env2.webgl.supported) { if (config3.filter.enabled && env2.webgl.supported) {
if (!fx) if (!fx)
fx = env2.browser ? new GLImageFilter({ canvas: outCanvas }) : null; fx = env2.browser ? new GLImageFilter() : null;
env2.filter = !!fx; env2.filter = !!fx;
if (!fx) if (!fx)
return { tensor: null, canvas: inCanvas }; return { tensor: null, canvas: inCanvas };
@ -68732,6 +68641,120 @@ async function skip(config3, input2) {
return skipFrame; return skipFrame;
} }
// src/util/env.ts
var Env = class {
constructor() {
__publicField(this, "browser");
__publicField(this, "node");
__publicField(this, "worker");
__publicField(this, "platform", "");
__publicField(this, "agent", "");
__publicField(this, "backends", []);
__publicField(this, "initial");
__publicField(this, "filter");
__publicField(this, "tfjs");
__publicField(this, "offscreen");
__publicField(this, "wasm", {
supported: void 0,
backend: void 0,
simd: void 0,
multithread: void 0
});
__publicField(this, "webgl", {
supported: void 0,
backend: void 0,
version: void 0,
renderer: void 0
});
__publicField(this, "webgpu", {
supported: void 0,
backend: void 0,
adapter: void 0
});
__publicField(this, "cpu", {
model: void 0,
flags: []
});
__publicField(this, "kernels", []);
__publicField(this, "Canvas");
__publicField(this, "Image");
__publicField(this, "ImageData");
this.browser = typeof navigator !== "undefined";
this.node = typeof process !== "undefined";
this.tfjs = { version };
this.offscreen = typeof OffscreenCanvas !== "undefined";
this.initial = true;
this.worker = this.browser && this.offscreen ? typeof WorkerGlobalScope !== "undefined" : void 0;
if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) {
const platformMatch = raw[0].match(/\(([^()]+)\)/g);
this.platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
this.agent = navigator.userAgent.replace(raw[0], "");
if (this.platform[1])
this.agent = this.agent.replace(raw[1], "");
this.agent = this.agent.replace(/ /g, " ");
}
} else if (typeof process !== "undefined") {
this.platform = `${process.platform} ${process.arch}`;
this.agent = `NodeJS ${process.version}`;
}
}
async updateBackend() {
var _a;
this.backends = Object.keys(engine().registryFactory);
this.wasm.supported = typeof WebAssembly !== "undefined";
this.wasm.backend = this.backends.includes("wasm");
if (this.wasm.supported && this.wasm.backend && getBackend() === "wasm") {
this.wasm.simd = await env().getAsync("WASM_HAS_SIMD_SUPPORT");
this.wasm.multithread = await env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
}
const c = canvas(100, 100);
const ctx = c ? c.getContext("webgl2") : void 0;
this.webgl.supported = typeof ctx !== "undefined";
this.webgl.backend = this.backends.includes("webgl");
if (this.webgl.supported && this.webgl.backend && (getBackend() === "webgl" || getBackend() === "humangl")) {
const gl = backend().gpgpu !== "undefined" ? await backend().getGPGPUContext().gl : null;
if (gl) {
this.webgl.version = gl.getParameter(gl.VERSION);
this.webgl.renderer = gl.getParameter(gl.RENDERER);
}
}
this.webgpu.supported = this.browser && typeof navigator["gpu"] !== "undefined";
this.webgpu.backend = this.backends.includes("webgpu");
if (this.webgpu.supported)
this.webgpu.adapter = (_a = await navigator["gpu"].requestAdapter()) == null ? void 0 : _a.name;
this.kernels = getKernelsForBackend(getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
}
async updateCPU() {
var _a;
const cpu = { model: "", flags: [] };
if (this.node && ((_a = this.platform) == null ? void 0 : _a.startsWith("linux"))) {
const fs = __require("fs");
try {
const data = fs.readFileSync("/proc/cpuinfo").toString();
for (const line of data.split("\n")) {
if (line.startsWith("model name")) {
cpu.model = line.match(/:(.*)/g)[0].replace(":", "").trim();
}
if (line.startsWith("flags")) {
cpu.flags = line.match(/:(.*)/g)[0].replace(":", "").trim().split(" ").sort();
}
}
} catch (e) {
}
}
if (!this["cpu"])
Object.defineProperty(this, "cpu", { value: cpu });
else
this["cpu"] = cpu;
}
};
var env2 = new Env();
// package.json
var version6 = "2.3.5";
// src/gear/gear-agegenderrace.ts // src/gear/gear-agegenderrace.ts
var model2; var model2;
var skipped = Number.MAX_SAFE_INTEGER; var skipped = Number.MAX_SAFE_INTEGER;
@ -78287,7 +78310,6 @@ async function check(instance, force = false) {
if (getBackend() === "humangl") { if (getBackend() === "humangl") {
ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false); ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
ENV.set("WEBGL_CPU_FORWARD", true); ENV.set("WEBGL_CPU_FORWARD", true);
ENV.set("WEBGL_PACK_DEPTHWISECONV", false);
ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true); ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true);
ENV.set("CPU_HANDOFF_SIZE_THRESHOLD", 256); ENV.set("CPU_HANDOFF_SIZE_THRESHOLD", 256);
if (typeof instance.config["deallocate"] !== "undefined" && instance.config["deallocate"]) { if (typeof instance.config["deallocate"] !== "undefined" && instance.config["deallocate"]) {
@ -78301,16 +78323,12 @@ async function check(instance, force = false) {
} }
} }
if (getBackend() === "webgpu") { if (getBackend() === "webgpu") {
ENV.set("WEBGPU_CPU_HANDOFF_SIZE_THRESHOLD", 512);
ENV.set("WEBGPU_DEFERRED_SUBMIT_BATCH_SIZE", 0);
ENV.set("WEBGPU_CPU_FORWARD", true);
} }
enableProdMode(); enableProdMode();
await ready(); await ready();
instance.performance.backend = Math.trunc(now() - timeStamp); instance.performance.backend = Math.trunc(now() - timeStamp);
instance.config.backend = getBackend(); instance.config.backend = getBackend();
get3(); env2.updateBackend();
instance.env = env2;
} }
return true; return true;
} }
@ -80189,11 +80207,10 @@ var Human = class {
if (this.events && this.events.dispatchEvent) if (this.events && this.events.dispatchEvent)
(_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event)); (_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event));
}); });
get3();
this.env = env2; this.env = env2;
config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${version}/dist/`; config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${version}/dist/`;
config.modelBasePath = this.env.browser ? "../models/" : "file://models/"; config.modelBasePath = env2.browser ? "../models/" : "file://models/";
config.backend = this.env.browser ? "humangl" : "tensorflow"; config.backend = env2.browser ? "humangl" : "tensorflow";
this.version = version6; this.version = version6;
Object.defineProperty(this, "version", { value: version6 }); Object.defineProperty(this, "version", { value: version6 });
this.config = JSON.parse(JSON.stringify(config)); this.config = JSON.parse(JSON.stringify(config));
@ -80249,7 +80266,6 @@ var Human = class {
async init() { async init() {
await check(this, true); await check(this, true);
await this.tf.ready(); await this.tf.ready();
set(this.env);
} }
async load(userConfig) { async load(userConfig) {
this.state = "load"; this.state = "load";
@ -80460,6 +80476,7 @@ _analyzeMemoryLeaks = new WeakMap();
_checkSanity = new WeakMap(); _checkSanity = new WeakMap();
_sanity = new WeakMap(); _sanity = new WeakMap();
export { export {
Env,
Human, Human,
Models, Models,
Human as default, Human as default,

File diff suppressed because one or more lines are too long

625
dist/human.js vendored

File diff suppressed because one or more lines are too long

396
dist/human.node-gpu.js vendored
View File

@ -82,11 +82,12 @@ var require_tfjs_esm = __commonJS({
// src/human.ts // src/human.ts
__export(exports, { __export(exports, {
Env: () => Env,
Human: () => Human, Human: () => Human,
Models: () => Models, Models: () => Models,
default: () => Human, default: () => Human,
defaults: () => config, defaults: () => config,
env: () => env2 env: () => env
}); });
// src/util/util.ts // src/util/util.ts
@ -253,17 +254,11 @@ var config = {
} }
}; };
// src/human.ts // src/util/env.ts
var tf28 = __toModule(require_tfjs_esm()); var tf2 = __toModule(require_tfjs_esm());
// package.json
var version = "2.3.5";
// src/tfjs/humangl.ts
var tf24 = __toModule(require_tfjs_esm());
// src/image/image.ts // src/image/image.ts
var tf2 = __toModule(require_tfjs_esm()); var tf = __toModule(require_tfjs_esm());
// src/image/imagefxshaders.ts // src/image/imagefxshaders.ts
var vertexIdentity = ` var vertexIdentity = `
@ -406,7 +401,7 @@ var GLProgram = class {
this.uniform[u] = this.gl.getUniformLocation(this.id, u); this.uniform[u] = this.gl.getUniformLocation(this.id, u);
} }
}; };
function GLImageFilter(params = {}) { function GLImageFilter() {
let drawCount = 0; let drawCount = 0;
let sourceTexture = null; let sourceTexture = null;
let lastInChain = false; let lastInChain = false;
@ -415,17 +410,17 @@ function GLImageFilter(params = {}) {
let filterChain = []; let filterChain = [];
let vertexBuffer = null; let vertexBuffer = null;
let currentProgram = null; let currentProgram = null;
const canvas3 = params["canvas"] || typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(100, 100) : document.createElement("canvas"); const fxcanvas = canvas(100, 100);
const shaderProgramCache = {}; const shaderProgramCache = {};
const DRAW = { INTERMEDIATE: 1 }; const DRAW = { INTERMEDIATE: 1 };
const gl = canvas3.getContext("webgl"); const gl = fxcanvas.getContext("webgl");
if (!gl) if (!gl)
throw new Error("filter: cannot get webgl context"); throw new Error("filter: cannot get webgl context");
function resize(width, height) { function resize(width, height) {
if (width === canvas3.width && height === canvas3.height) if (width === fxcanvas.width && height === fxcanvas.height)
return; return;
canvas3.width = width; fxcanvas.width = width;
canvas3.height = height; fxcanvas.height = height;
if (!vertexBuffer) { if (!vertexBuffer) {
const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]); const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]);
vertexBuffer = gl.createBuffer(); vertexBuffer = gl.createBuffer();
@ -433,7 +428,7 @@ function GLImageFilter(params = {}) {
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW); gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true); gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
} }
gl.viewport(0, 0, canvas3.width, canvas3.height); gl.viewport(0, 0, fxcanvas.width, fxcanvas.height);
tempFramebuffers = [null, null]; tempFramebuffers = [null, null];
} }
function createFramebufferTexture(width, height) { function createFramebufferTexture(width, height) {
@ -454,7 +449,7 @@ function GLImageFilter(params = {}) {
return { fbo, texture }; return { fbo, texture };
} }
function getTempFramebuffer(index) { function getTempFramebuffer(index) {
tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(canvas3.width, canvas3.height); tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(fxcanvas.width, fxcanvas.height);
return tempFramebuffers[index]; return tempFramebuffers[index];
} }
function draw2(flags = 0) { function draw2(flags = 0) {
@ -816,8 +811,8 @@ function GLImageFilter(params = {}) {
}, },
convolution: (matrix) => { convolution: (matrix) => {
const m = new Float32Array(matrix); const m = new Float32Array(matrix);
const pixelSizeX = 1 / canvas3.width; const pixelSizeX = 1 / fxcanvas.width;
const pixelSizeY = 1 / canvas3.height; const pixelSizeY = 1 / fxcanvas.height;
const program = compileShader(convolution); const program = compileShader(convolution);
gl.uniform1fv(program == null ? void 0 : program.uniform["m"], m); gl.uniform1fv(program == null ? void 0 : program.uniform["m"], m);
gl.uniform2f(program == null ? void 0 : program.uniform["px"], pixelSizeX, pixelSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["px"], pixelSizeX, pixelSizeY);
@ -891,8 +886,8 @@ function GLImageFilter(params = {}) {
]); ]);
}, },
blur: (size2) => { blur: (size2) => {
const blurSizeX = size2 / 7 / canvas3.width; const blurSizeX = size2 / 7 / fxcanvas.width;
const blurSizeY = size2 / 7 / canvas3.height; const blurSizeY = size2 / 7 / fxcanvas.height;
const program = compileShader(blur); const program = compileShader(blur);
gl.uniform2f(program == null ? void 0 : program.uniform["px"], 0, blurSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["px"], 0, blurSizeY);
draw2(DRAW.INTERMEDIATE); draw2(DRAW.INTERMEDIATE);
@ -900,8 +895,8 @@ function GLImageFilter(params = {}) {
draw2(); draw2();
}, },
pixelate: (size2) => { pixelate: (size2) => {
const blurSizeX = size2 / canvas3.width; const blurSizeX = size2 / fxcanvas.width;
const blurSizeY = size2 / canvas3.height; const blurSizeY = size2 / fxcanvas.height;
const program = compileShader(pixelate); const program = compileShader(pixelate);
gl.uniform2f(program == null ? void 0 : program.uniform["size"], blurSizeX, blurSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["size"], blurSizeX, blurSizeY);
draw2(); draw2();
@ -934,7 +929,7 @@ function GLImageFilter(params = {}) {
const f = filterChain[i]; const f = filterChain[i];
f.func.apply(this, f.args || []); f.func.apply(this, f.args || []);
} }
return canvas3; return fxcanvas;
}; };
this.draw = function(image25) { this.draw = function(image25) {
this.add("brightness", 0); this.add("brightness", 0);
@ -942,95 +937,6 @@ function GLImageFilter(params = {}) {
}; };
} }
// src/util/env.ts
var tf = __toModule(require_tfjs_esm());
var env2 = {
browser: void 0,
node: void 0,
worker: void 0,
platform: void 0,
agent: void 0,
initial: true,
backends: [],
offscreen: void 0,
filter: void 0,
tfjs: {
version: void 0
},
wasm: {
supported: void 0,
backend: void 0,
simd: void 0,
multithread: void 0
},
webgl: {
supported: void 0,
backend: void 0,
version: void 0,
renderer: void 0
},
webgpu: {
supported: void 0,
backend: void 0,
adapter: void 0
},
kernels: [],
Canvas: void 0,
Image: void 0,
ImageData: void 0
};
async function backendInfo() {
var _a;
env2.backends = Object.keys(tf.engine().registryFactory);
env2.wasm.supported = typeof WebAssembly !== "undefined";
env2.wasm.backend = env2.backends.includes("wasm");
if (env2.wasm.supported && env2.wasm.backend && tf.getBackend() === "wasm") {
env2.wasm.simd = await tf.env().getAsync("WASM_HAS_SIMD_SUPPORT");
env2.wasm.multithread = await tf.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
}
const c = canvas(100, 100);
const ctx = c ? c.getContext("webgl2") : void 0;
env2.webgl.supported = typeof ctx !== "undefined";
env2.webgl.backend = env2.backends.includes("webgl");
if (env2.webgl.supported && env2.webgl.backend && (tf.getBackend() === "webgl" || tf.getBackend() === "humangl")) {
const gl = tf.backend().gpgpu !== "undefined" ? await tf.backend().getGPGPUContext().gl : null;
if (gl) {
env2.webgl.version = gl.getParameter(gl.VERSION);
env2.webgl.renderer = gl.getParameter(gl.RENDERER);
}
}
env2.webgpu.supported = env2.browser && typeof navigator["gpu"] !== "undefined";
env2.webgpu.backend = env2.backends.includes("webgpu");
if (env2.webgpu.supported)
env2.webgpu.adapter = (_a = await navigator["gpu"].requestAdapter()) == null ? void 0 : _a.name;
env2.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
}
async function get() {
env2.browser = typeof navigator !== "undefined";
env2.node = typeof process !== "undefined";
env2.tfjs.version = tf.version_core;
env2.offscreen = typeof env2.offscreen === "undefined" ? typeof OffscreenCanvas !== "undefined" : env2.offscreen;
if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) {
const platformMatch = raw[0].match(/\(([^()]+)\)/g);
env2.platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
env2.agent = navigator.userAgent.replace(raw[0], "");
if (env2.platform[1])
env2.agent = env2.agent.replace(raw[1], "");
env2.agent = env2.agent.replace(/ /g, " ");
}
} else if (typeof process !== "undefined") {
env2.platform = `${process.platform} ${process.arch}`;
env2.agent = `NodeJS ${process.version}`;
}
env2.worker = env2.browser && env2.offscreen ? typeof WorkerGlobalScope !== "undefined" : void 0;
await backendInfo();
}
async function set(obj) {
env2 = mergeDeep(env2, obj);
}
// src/image/image.ts // src/image/image.ts
var maxSize = 2048; var maxSize = 2048;
var inCanvas = null; var inCanvas = null;
@ -1039,8 +945,8 @@ var tmpCanvas = null;
var fx; var fx;
function canvas(width, height) { function canvas(width, height) {
let c; let c;
if (env2.browser) { if (env.browser) {
if (env2.offscreen) { if (env.offscreen) {
c = new OffscreenCanvas(width, height); c = new OffscreenCanvas(width, height);
} else { } else {
if (typeof document === "undefined") if (typeof document === "undefined")
@ -1050,8 +956,8 @@ function canvas(width, height) {
c.height = height; c.height = height;
} }
} else { } else {
if (typeof env2.Canvas !== "undefined") if (typeof env.Canvas !== "undefined")
c = new env2.Canvas(width, height); c = new env.Canvas(width, height);
else if (typeof globalThis.Canvas !== "undefined") else if (typeof globalThis.Canvas !== "undefined")
c = new globalThis.Canvas(width, height); c = new globalThis.Canvas(width, height);
} }
@ -1069,16 +975,16 @@ function process2(input, config3, getTensor = true) {
log("input is missing"); log("input is missing");
return { tensor: null, canvas: null }; return { tensor: null, canvas: null };
} }
if (!(input instanceof tf2.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env2.Canvas !== "undefined" && input instanceof env2.Canvas) && !(typeof globalThis.Canvas !== "undefined" && input instanceof globalThis.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) { if (!(input instanceof tf.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env.Canvas !== "undefined" && input instanceof env.Canvas) && !(typeof globalThis.Canvas !== "undefined" && input instanceof globalThis.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) {
throw new Error("input type is not recognized"); throw new Error("input type is not recognized");
} }
if (input instanceof tf2.Tensor) { if (input instanceof tf.Tensor) {
if (input["isDisposedInternal"]) { if (input["isDisposedInternal"]) {
throw new Error("input tensor is disposed"); throw new Error("input tensor is disposed");
} else if (!input.shape || input.shape.length !== 4 || input.shape[0] !== 1 || input.shape[3] !== 3) { } else if (!input.shape || input.shape.length !== 4 || input.shape[0] !== 1 || input.shape[3] !== 3) {
throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input["shape"]}`); throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input["shape"]}`);
} else { } else {
return { tensor: tf2.clone(input), canvas: config3.filter.return ? outCanvas : null }; return { tensor: tf.clone(input), canvas: config3.filter.return ? outCanvas : null };
} }
} else { } else {
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) { if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
@ -1130,10 +1036,10 @@ function process2(input, config3, getTensor = true) {
} }
if (!outCanvas || inCanvas.width !== outCanvas.width || (inCanvas == null ? void 0 : inCanvas.height) !== (outCanvas == null ? void 0 : outCanvas.height)) if (!outCanvas || inCanvas.width !== outCanvas.width || (inCanvas == null ? void 0 : inCanvas.height) !== (outCanvas == null ? void 0 : outCanvas.height))
outCanvas = canvas(inCanvas.width, inCanvas.height); outCanvas = canvas(inCanvas.width, inCanvas.height);
if (config3.filter.enabled && env2.webgl.supported) { if (config3.filter.enabled && env.webgl.supported) {
if (!fx) if (!fx)
fx = env2.browser ? new GLImageFilter({ canvas: outCanvas }) : null; fx = env.browser ? new GLImageFilter() : null;
env2.filter = !!fx; env.filter = !!fx;
if (!fx) if (!fx)
return { tensor: null, canvas: inCanvas }; return { tensor: null, canvas: inCanvas };
fx.reset(); fx.reset();
@ -1173,7 +1079,7 @@ function process2(input, config3, getTensor = true) {
copy(inCanvas, outCanvas); copy(inCanvas, outCanvas);
if (fx) if (fx)
fx = null; fx = null;
env2.filter = !!fx; env.filter = !!fx;
} }
if (!getTensor) if (!getTensor)
return { tensor: null, canvas: outCanvas }; return { tensor: null, canvas: outCanvas };
@ -1182,22 +1088,22 @@ function process2(input, config3, getTensor = true) {
let pixels; let pixels;
let depth = 3; let depth = 3;
if (typeof ImageData !== "undefined" && input instanceof ImageData || input["data"] && input["width"] && input["height"]) { if (typeof ImageData !== "undefined" && input instanceof ImageData || input["data"] && input["width"] && input["height"]) {
if (env2.browser && tf2.browser) { if (env.browser && tf.browser) {
pixels = tf2.browser ? tf2.browser.fromPixels(input) : null; pixels = tf.browser ? tf.browser.fromPixels(input) : null;
} else { } else {
depth = input["data"].length / input["height"] / input["width"]; depth = input["data"].length / input["height"] / input["width"];
const arr = new Uint8Array(input["data"]["buffer"]); const arr = new Uint8Array(input["data"]["buffer"]);
pixels = tf2.tensor(arr, [input["height"], input["width"], depth], "int32"); pixels = tf.tensor(arr, [input["height"], input["width"], depth], "int32");
} }
} else { } else {
if (!tmpCanvas || outCanvas.width !== tmpCanvas.width || (outCanvas == null ? void 0 : outCanvas.height) !== (tmpCanvas == null ? void 0 : tmpCanvas.height)) if (!tmpCanvas || outCanvas.width !== tmpCanvas.width || (outCanvas == null ? void 0 : outCanvas.height) !== (tmpCanvas == null ? void 0 : tmpCanvas.height))
tmpCanvas = canvas(outCanvas.width, outCanvas.height); tmpCanvas = canvas(outCanvas.width, outCanvas.height);
if (tf2.browser && env2.browser) { if (tf.browser && env.browser) {
if (config3.backend === "webgl" || config3.backend === "humangl" || config3.backend === "webgpu") { if (config3.backend === "webgl" || config3.backend === "humangl" || config3.backend === "webgpu") {
pixels = tf2.browser.fromPixels(outCanvas); pixels = tf.browser.fromPixels(outCanvas);
} else { } else {
tmpCanvas = copy(outCanvas); tmpCanvas = copy(outCanvas);
pixels = tf2.browser.fromPixels(tmpCanvas); pixels = tf.browser.fromPixels(tmpCanvas);
} }
} else { } else {
const tempCanvas = copy(outCanvas); const tempCanvas = copy(outCanvas);
@ -1205,19 +1111,19 @@ function process2(input, config3, getTensor = true) {
const tempData = tempCtx.getImageData(0, 0, targetWidth, targetHeight); const tempData = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
depth = tempData.data.length / targetWidth / targetHeight; depth = tempData.data.length / targetWidth / targetHeight;
const arr = new Uint8Array(tempData.data.buffer); const arr = new Uint8Array(tempData.data.buffer);
pixels = tf2.tensor(arr, [targetWidth, targetHeight, depth]); pixels = tf.tensor(arr, [targetWidth, targetHeight, depth]);
} }
} }
if (depth === 4) { if (depth === 4) {
const rgb2 = tf2.slice3d(pixels, [0, 0, 0], [-1, -1, 3]); const rgb2 = tf.slice3d(pixels, [0, 0, 0], [-1, -1, 3]);
tf2.dispose(pixels); tf.dispose(pixels);
pixels = rgb2; pixels = rgb2;
} }
if (!pixels) if (!pixels)
throw new Error("cannot create tensor from input"); throw new Error("cannot create tensor from input");
const casted = tf2.cast(pixels, "float32"); const casted = tf.cast(pixels, "float32");
const tensor3 = tf2.expandDims(casted, 0); const tensor3 = tf.expandDims(casted, 0);
tf2.dispose([pixels, casted]); tf.dispose([pixels, casted]);
return { tensor: tensor3, canvas: config3.filter.return ? outCanvas : null }; return { tensor: tensor3, canvas: config3.filter.return ? outCanvas : null };
} }
} }
@ -1226,11 +1132,11 @@ var lastCacheDiff = 1;
var benchmarked = 0; var benchmarked = 0;
var checksum = async (input) => { var checksum = async (input) => {
const resizeFact = 48; const resizeFact = 48;
const reduced = tf2.image.resizeBilinear(input, [Math.trunc((input.shape[1] || 1) / resizeFact), Math.trunc((input.shape[2] || 1) / resizeFact)]); const reduced = tf.image.resizeBilinear(input, [Math.trunc((input.shape[1] || 1) / resizeFact), Math.trunc((input.shape[2] || 1) / resizeFact)]);
const tfSum = async () => { const tfSum = async () => {
const sumT = tf2.sum(reduced); const sumT = tf.sum(reduced);
const sum0 = await sumT.data(); const sum0 = await sumT.data();
tf2.dispose(sumT); tf.dispose(sumT);
return sum0[0]; return sum0[0];
}; };
const jsSum = async () => { const jsSum = async () => {
@ -1249,7 +1155,7 @@ var checksum = async (input) => {
benchmarked = t1 - t0 < t2 - t1 ? 1 : 2; benchmarked = t1 - t0 < t2 - t1 ? 1 : 2;
} }
const res = benchmarked === 1 ? await jsSum() : await tfSum(); const res = benchmarked === 1 ? await jsSum() : await tfSum();
tf2.dispose(reduced); tf.dispose(reduced);
return res; return res;
}; };
async function skip(config3, input) { async function skip(config3, input) {
@ -1264,12 +1170,132 @@ async function skip(config3, input) {
return skipFrame; return skipFrame;
} }
// src/util/env.ts
var Env = class {
constructor() {
__publicField(this, "browser");
__publicField(this, "node");
__publicField(this, "worker");
__publicField(this, "platform", "");
__publicField(this, "agent", "");
__publicField(this, "backends", []);
__publicField(this, "initial");
__publicField(this, "filter");
__publicField(this, "tfjs");
__publicField(this, "offscreen");
__publicField(this, "wasm", {
supported: void 0,
backend: void 0,
simd: void 0,
multithread: void 0
});
__publicField(this, "webgl", {
supported: void 0,
backend: void 0,
version: void 0,
renderer: void 0
});
__publicField(this, "webgpu", {
supported: void 0,
backend: void 0,
adapter: void 0
});
__publicField(this, "cpu", {
model: void 0,
flags: []
});
__publicField(this, "kernels", []);
__publicField(this, "Canvas");
__publicField(this, "Image");
__publicField(this, "ImageData");
this.browser = typeof navigator !== "undefined";
this.node = typeof process !== "undefined";
this.tfjs = { version: tf2.version_core };
this.offscreen = typeof OffscreenCanvas !== "undefined";
this.initial = true;
this.worker = this.browser && this.offscreen ? typeof WorkerGlobalScope !== "undefined" : void 0;
if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) {
const platformMatch = raw[0].match(/\(([^()]+)\)/g);
this.platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
this.agent = navigator.userAgent.replace(raw[0], "");
if (this.platform[1])
this.agent = this.agent.replace(raw[1], "");
this.agent = this.agent.replace(/ /g, " ");
}
} else if (typeof process !== "undefined") {
this.platform = `${process.platform} ${process.arch}`;
this.agent = `NodeJS ${process.version}`;
}
}
async updateBackend() {
var _a;
this.backends = Object.keys(tf2.engine().registryFactory);
this.wasm.supported = typeof WebAssembly !== "undefined";
this.wasm.backend = this.backends.includes("wasm");
if (this.wasm.supported && this.wasm.backend && tf2.getBackend() === "wasm") {
this.wasm.simd = await tf2.env().getAsync("WASM_HAS_SIMD_SUPPORT");
this.wasm.multithread = await tf2.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
}
const c = canvas(100, 100);
const ctx = c ? c.getContext("webgl2") : void 0;
this.webgl.supported = typeof ctx !== "undefined";
this.webgl.backend = this.backends.includes("webgl");
if (this.webgl.supported && this.webgl.backend && (tf2.getBackend() === "webgl" || tf2.getBackend() === "humangl")) {
const gl = tf2.backend().gpgpu !== "undefined" ? await tf2.backend().getGPGPUContext().gl : null;
if (gl) {
this.webgl.version = gl.getParameter(gl.VERSION);
this.webgl.renderer = gl.getParameter(gl.RENDERER);
}
}
this.webgpu.supported = this.browser && typeof navigator["gpu"] !== "undefined";
this.webgpu.backend = this.backends.includes("webgpu");
if (this.webgpu.supported)
this.webgpu.adapter = (_a = await navigator["gpu"].requestAdapter()) == null ? void 0 : _a.name;
this.kernels = tf2.getKernelsForBackend(tf2.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
}
async updateCPU() {
var _a;
const cpu = { model: "", flags: [] };
if (this.node && ((_a = this.platform) == null ? void 0 : _a.startsWith("linux"))) {
const fs = require("fs");
try {
const data = fs.readFileSync("/proc/cpuinfo").toString();
for (const line of data.split("\n")) {
if (line.startsWith("model name")) {
cpu.model = line.match(/:(.*)/g)[0].replace(":", "").trim();
}
if (line.startsWith("flags")) {
cpu.flags = line.match(/:(.*)/g)[0].replace(":", "").trim().split(" ").sort();
}
}
} catch (e) {
}
}
if (!this["cpu"])
Object.defineProperty(this, "cpu", { value: cpu });
else
this["cpu"] = cpu;
}
};
var env = new Env();
// src/human.ts
var tf28 = __toModule(require_tfjs_esm());
// package.json
var version = "2.3.5";
// src/tfjs/humangl.ts
var tf24 = __toModule(require_tfjs_esm());
// src/gear/gear-agegenderrace.ts // src/gear/gear-agegenderrace.ts
var tf3 = __toModule(require_tfjs_esm()); var tf3 = __toModule(require_tfjs_esm());
var model; var model;
var skipped = Number.MAX_SAFE_INTEGER; var skipped = Number.MAX_SAFE_INTEGER;
async function load(config3) { async function load(config3) {
if (env2.initial) if (env.initial)
model = null; model = null;
if (!model) { if (!model) {
model = await tf3.loadGraphModel(join(config3.modelBasePath, config3.face.agegenderrace.modelPath)); model = await tf3.loadGraphModel(join(config3.modelBasePath, config3.face.agegenderrace.modelPath));
@ -1290,7 +1316,7 @@ var skipped2 = Number.MAX_SAFE_INTEGER;
var lastCount = 0; var lastCount = 0;
async function load2(config3) { async function load2(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model2 = null; model2 = null;
if (!model2) { if (!model2) {
model2 = await tf4.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.antispoof) == null ? void 0 : _a.modelPath) || "")); model2 = await tf4.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.antispoof) == null ? void 0 : _a.modelPath) || ""));
@ -4755,7 +4781,7 @@ var inputSize = 0;
var size = () => inputSize; var size = () => inputSize;
async function load3(config3) { async function load3(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model3 = null; model3 = null;
if (!model3) { if (!model3) {
model3 = await tf6.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.detector) == null ? void 0 : _a.modelPath) || "")); model3 = await tf6.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.detector) == null ? void 0 : _a.modelPath) || ""));
@ -5121,7 +5147,7 @@ var inputSize3 = 0;
var last = []; var last = [];
var skipped4 = Number.MAX_SAFE_INTEGER; var skipped4 = Number.MAX_SAFE_INTEGER;
async function load4(config3) { async function load4(config3) {
if (env2.initial) if (env.initial)
model4 = null; model4 = null;
if (!model4) { if (!model4) {
fakeOps(["floormod"], config3); fakeOps(["floormod"], config3);
@ -5188,7 +5214,7 @@ async function predict3(input, config3) {
return last; return last;
} }
skipped4 = 0; skipped4 = 0;
if (!env2.kernels.includes("mod") || !env2.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last; return last;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [input.shape[2], input.shape[1]]; const outputSize2 = [input.shape[2], input.shape[1]];
@ -5242,7 +5268,7 @@ var model5;
var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} }; var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} };
var skipped5 = Number.MAX_SAFE_INTEGER; var skipped5 = Number.MAX_SAFE_INTEGER;
async function load5(config3) { async function load5(config3) {
if (env2.initial) if (env.initial)
model5 = null; model5 = null;
if (!model5) { if (!model5) {
model5 = await tf9.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || "")); model5 = await tf9.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || ""));
@ -5356,7 +5382,7 @@ var skipped6 = Number.MAX_SAFE_INTEGER;
var rgb = [0.2989, 0.587, 0.114]; var rgb = [0.2989, 0.587, 0.114];
async function load6(config3) { async function load6(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model6 = null; model6 = null;
if (!model6) { if (!model6) {
model6 = await tf10.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.emotion) == null ? void 0 : _a.modelPath) || "")); model6 = await tf10.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.emotion) == null ? void 0 : _a.modelPath) || ""));
@ -5434,7 +5460,7 @@ var irisLandmarks = {
}; };
async function load7(config3) { async function load7(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model7 = null; model7 = null;
if (!model7) { if (!model7) {
model7 = await tf11.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.iris) == null ? void 0 : _a.modelPath) || "")); model7 = await tf11.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.iris) == null ? void 0 : _a.modelPath) || ""));
@ -5479,7 +5505,7 @@ var getEyeBox = (rawCoords, face5, eyeInnerCornerIndex, eyeOuterCornerIndex, fli
box4.endPoint[1] / meshSize, box4.endPoint[1] / meshSize,
box4.endPoint[0] / meshSize box4.endPoint[0] / meshSize
]], [0], [inputSize4, inputSize4]); ]], [0], [inputSize4, inputSize4]);
if (flip && env2.kernels.includes("flipleftright")) { if (flip && env.kernels.includes("flipleftright")) {
const flipped = tf11.image.flipLeftRight(crop2); const flipped = tf11.image.flipLeftRight(crop2);
tf11.dispose(crop2); tf11.dispose(crop2);
crop2 = flipped; crop2 = flipped;
@ -5593,7 +5619,7 @@ async function predict6(input, config3) {
faceScore: 0, faceScore: 0,
annotations: {} annotations: {}
}; };
if (((_d = config3.face.detector) == null ? void 0 : _d.rotation) && ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) && env2.kernels.includes("rotatewithoffset")) { if (((_d = config3.face.detector) == null ? void 0 : _d.rotation) && ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) && env.kernels.includes("rotatewithoffset")) {
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} else { } else {
rotationMatrix = IDENTITY_MATRIX; rotationMatrix = IDENTITY_MATRIX;
@ -5635,7 +5661,7 @@ async function predict6(input, config3) {
box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence }; box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence };
for (const key of Object.keys(meshAnnotations)) for (const key of Object.keys(meshAnnotations))
face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]); face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]);
if (((_j = config3.face.detector) == null ? void 0 : _j.rotation) && config3.face.mesh.enabled && ((_k = config3.face.description) == null ? void 0 : _k.enabled) && env2.kernels.includes("rotatewithoffset")) { if (((_j = config3.face.detector) == null ? void 0 : _j.rotation) && config3.face.mesh.enabled && ((_k = config3.face.description) == null ? void 0 : _k.enabled) && env.kernels.includes("rotatewithoffset")) {
tf12.dispose(face5.tensor); tf12.dispose(face5.tensor);
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} }
@ -5659,7 +5685,7 @@ async function predict6(input, config3) {
} }
async function load8(config3) { async function load8(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model8 = null; model8 = null;
if (!model8) { if (!model8) {
model8 = await tf12.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.mesh) == null ? void 0 : _a.modelPath) || "")); model8 = await tf12.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.mesh) == null ? void 0 : _a.modelPath) || ""));
@ -5686,7 +5712,7 @@ var skipped8 = Number.MAX_SAFE_INTEGER;
async function load9(config3) { async function load9(config3) {
var _a, _b; var _a, _b;
const modelUrl = join(config3.modelBasePath, ((_a = config3.face.description) == null ? void 0 : _a.modelPath) || ""); const modelUrl = join(config3.modelBasePath, ((_a = config3.face.description) == null ? void 0 : _a.modelPath) || "");
if (env2.initial) if (env.initial)
model9 = null; model9 = null;
if (!model9) { if (!model9) {
model9 = await tf13.loadGraphModel(modelUrl); model9 = await tf13.loadGraphModel(modelUrl);
@ -8991,7 +9017,7 @@ var HandPipeline = class {
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0; const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
const palmCenter = getBoxCenter2(currentBox); const palmCenter = getBoxCenter2(currentBox);
const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]]; const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]];
const rotatedImage = config3.hand.rotation && env2.kernels.includes("rotatewithoffset") ? tf16.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone(); const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf16.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone();
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter); const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox; const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]); const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@ -9493,7 +9519,7 @@ async function predict8(input, config3) {
} }
async function load10(config3) { async function load10(config3) {
var _a, _b, _c, _d, _e, _f; var _a, _b, _c, _d, _e, _f;
if (env2.initial) { if (env.initial) {
handDetectorModel = null; handDetectorModel = null;
handPoseModel = null; handPoseModel = null;
} }
@ -9583,7 +9609,7 @@ var fingerMap = {
}; };
async function loadDetect2(config3) { async function loadDetect2(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
models2[0] = null; models2[0] = null;
if (!models2[0]) { if (!models2[0]) {
fakeOps(["tensorlistreserve", "enter", "tensorlistfromtensor", "merge", "loopcond", "switch", "exit", "tensorliststack", "nextiteration", "tensorlistsetitem", "tensorlistgetitem", "reciprocal", "shape", "split", "where"], config3); fakeOps(["tensorlistreserve", "enter", "tensorlistfromtensor", "merge", "loopcond", "switch", "exit", "tensorliststack", "nextiteration", "tensorlistsetitem", "tensorlistgetitem", "reciprocal", "shape", "split", "where"], config3);
@ -9601,7 +9627,7 @@ async function loadDetect2(config3) {
} }
async function loadSkeleton(config3) { async function loadSkeleton(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
models2[1] = null; models2[1] = null;
if (!models2[1]) { if (!models2[1]) {
models2[1] = await tf18.loadGraphModel(join(config3.modelBasePath, ((_a = config3.hand.skeleton) == null ? void 0 : _a.modelPath) || "")); models2[1] = await tf18.loadGraphModel(join(config3.modelBasePath, ((_a = config3.hand.skeleton) == null ? void 0 : _a.modelPath) || ""));
@ -9910,7 +9936,7 @@ var cache5 = {
bodies: [] bodies: []
}; };
async function load11(config3) { async function load11(config3) {
if (env2.initial) if (env.initial)
model10 = null; model10 = null;
if (!model10) { if (!model10) {
fakeOps(["size"], config3); fakeOps(["size"], config3);
@ -10043,7 +10069,7 @@ var last4 = [];
var skipped11 = Number.MAX_SAFE_INTEGER; var skipped11 = Number.MAX_SAFE_INTEGER;
var scaleBox = 2.5; var scaleBox = 2.5;
async function load12(config3) { async function load12(config3) {
if (!model11 || env2.initial) { if (!model11 || env.initial) {
model11 = await tf21.loadGraphModel(join(config3.modelBasePath, config3.object.modelPath || "")); model11 = await tf21.loadGraphModel(join(config3.modelBasePath, config3.object.modelPath || ""));
const inputs = Object.values(model11.modelSignature["inputs"]); const inputs = Object.values(model11.modelSignature["inputs"]);
model11.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null; model11.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null;
@ -10124,7 +10150,7 @@ async function predict11(image25, config3) {
return last4; return last4;
} }
skipped11 = 0; skipped11 = 0;
if (!env2.kernels.includes("mod") || !env2.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last4; return last4;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [image25.shape[2], image25.shape[1]]; const outputSize2 = [image25.shape[2], image25.shape[1]];
@ -10475,7 +10501,7 @@ async function predict12(input, config3) {
return scaled; return scaled;
} }
async function load13(config3) { async function load13(config3) {
if (!model12 || env2.initial) { if (!model12 || env.initial) {
model12 = await tf22.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || "")); model12 = await tf22.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || ""));
if (!model12 || !model12["modelUrl"]) if (!model12 || !model12["modelUrl"])
log("load model failed:", config3.body.modelPath); log("load model failed:", config3.body.modelPath);
@ -10491,7 +10517,7 @@ var tf23 = __toModule(require_tfjs_esm());
var model13; var model13;
var busy = false; var busy = false;
async function load14(config3) { async function load14(config3) {
if (!model13 || env2.initial) { if (!model13 || env.initial) {
model13 = await tf23.loadGraphModel(join(config3.modelBasePath, config3.segmentation.modelPath || "")); model13 = await tf23.loadGraphModel(join(config3.modelBasePath, config3.segmentation.modelPath || ""));
if (!model13 || !model13["modelUrl"]) if (!model13 || !model13["modelUrl"])
log("load model failed:", config3.segmentation.modelPath); log("load model failed:", config3.segmentation.modelPath);
@ -10530,7 +10556,7 @@ async function process5(input, background, config3) {
t.data = tf23.image.resizeBilinear(t.squeeze, [height, width]); t.data = tf23.image.resizeBilinear(t.squeeze, [height, width]);
} }
const data = Array.from(await t.data.data()); const data = Array.from(await t.data.data());
if (env2.node && !env2.Canvas && typeof ImageData === "undefined") { if (env.node && !env.Canvas && typeof ImageData === "undefined") {
if (config3.debug) if (config3.debug)
log("canvas support missing"); log("canvas support missing");
Object.keys(t).forEach((tensor3) => tf23.dispose(t[tensor3])); Object.keys(t).forEach((tensor3) => tf23.dispose(t[tensor3]));
@ -10602,7 +10628,7 @@ function reset(instance) {
} }
async function load15(instance) { async function load15(instance) {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E;
if (env2.initial) if (env.initial)
reset(instance); reset(instance);
if (instance.config.hand.enabled) { if (instance.config.hand.enabled) {
if (!instance.models.handpose && ((_b = (_a = instance.config.hand.detector) == null ? void 0 : _a.modelPath) == null ? void 0 : _b.includes("handdetect"))) if (!instance.models.handpose && ((_b = (_a = instance.config.hand.detector) == null ? void 0 : _a.modelPath) == null ? void 0 : _b.includes("handdetect")))
@ -10796,24 +10822,24 @@ async function register(instance) {
var tf25 = __toModule(require_tfjs_esm()); var tf25 = __toModule(require_tfjs_esm());
async function check(instance, force = false) { async function check(instance, force = false) {
instance.state = "backend"; instance.state = "backend";
if (force || env2.initial || instance.config.backend && instance.config.backend.length > 0 && tf25.getBackend() !== instance.config.backend) { if (force || env.initial || instance.config.backend && instance.config.backend.length > 0 && tf25.getBackend() !== instance.config.backend) {
const timeStamp = now(); const timeStamp = now();
if (instance.config.backend && instance.config.backend.length > 0) { if (instance.config.backend && instance.config.backend.length > 0) {
if (typeof window === "undefined" && typeof WorkerGlobalScope !== "undefined" && instance.config.debug) { if (typeof window === "undefined" && typeof WorkerGlobalScope !== "undefined" && instance.config.debug) {
if (instance.config.debug) if (instance.config.debug)
log("running inside web worker"); log("running inside web worker");
} }
if (env2.browser && instance.config.backend === "tensorflow") { if (env.browser && instance.config.backend === "tensorflow") {
if (instance.config.debug) if (instance.config.debug)
log("override: backend set to tensorflow while running in browser"); log("override: backend set to tensorflow while running in browser");
instance.config.backend = "humangl"; instance.config.backend = "humangl";
} }
if (env2.node && (instance.config.backend === "webgl" || instance.config.backend === "humangl")) { if (env.node && (instance.config.backend === "webgl" || instance.config.backend === "humangl")) {
if (instance.config.debug) if (instance.config.debug)
log(`override: backend set to ${instance.config.backend} while running in nodejs`); log(`override: backend set to ${instance.config.backend} while running in nodejs`);
instance.config.backend = "tensorflow"; instance.config.backend = "tensorflow";
} }
if (env2.browser && instance.config.backend === "webgpu") { if (env.browser && instance.config.backend === "webgpu") {
if (typeof navigator === "undefined" || typeof navigator["gpu"] === "undefined") { if (typeof navigator === "undefined" || typeof navigator["gpu"] === "undefined") {
log("override: backend set to webgpu but browser does not support webgpu"); log("override: backend set to webgpu but browser does not support webgpu");
instance.config.backend = "humangl"; instance.config.backend = "humangl";
@ -10830,7 +10856,7 @@ async function check(instance, force = false) {
log("available backends:", available); log("available backends:", available);
if (!available.includes(instance.config.backend)) { if (!available.includes(instance.config.backend)) {
log(`error: backend ${instance.config.backend} not found in registry`); log(`error: backend ${instance.config.backend} not found in registry`);
instance.config.backend = env2.node ? "tensorflow" : "webgl"; instance.config.backend = env.node ? "tensorflow" : "webgl";
if (instance.config.debug) if (instance.config.debug)
log(`override: setting backend ${instance.config.backend}`); log(`override: setting backend ${instance.config.backend}`);
} }
@ -10861,7 +10887,6 @@ async function check(instance, force = false) {
if (tf25.getBackend() === "humangl") { if (tf25.getBackend() === "humangl") {
tf25.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false); tf25.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
tf25.ENV.set("WEBGL_CPU_FORWARD", true); tf25.ENV.set("WEBGL_CPU_FORWARD", true);
tf25.ENV.set("WEBGL_PACK_DEPTHWISECONV", false);
tf25.ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true); tf25.ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true);
tf25.ENV.set("CPU_HANDOFF_SIZE_THRESHOLD", 256); tf25.ENV.set("CPU_HANDOFF_SIZE_THRESHOLD", 256);
if (typeof instance.config["deallocate"] !== "undefined" && instance.config["deallocate"]) { if (typeof instance.config["deallocate"] !== "undefined" && instance.config["deallocate"]) {
@ -10875,16 +10900,12 @@ async function check(instance, force = false) {
} }
} }
if (tf25.getBackend() === "webgpu") { if (tf25.getBackend() === "webgpu") {
tf25.ENV.set("WEBGPU_CPU_HANDOFF_SIZE_THRESHOLD", 512);
tf25.ENV.set("WEBGPU_DEFERRED_SUBMIT_BATCH_SIZE", 0);
tf25.ENV.set("WEBGPU_CPU_FORWARD", true);
} }
tf25.enableProdMode(); tf25.enableProdMode();
await tf25.ready(); await tf25.ready();
instance.performance.backend = Math.trunc(now() - timeStamp); instance.performance.backend = Math.trunc(now() - timeStamp);
instance.config.backend = tf25.getBackend(); instance.config.backend = tf25.getBackend();
get(); env.updateBackend();
instance.env = env2;
} }
return true; return true;
} }
@ -10900,7 +10921,7 @@ function fakeOps(kernelNames, config3) {
}; };
tf25.registerKernel(kernelConfig); tf25.registerKernel(kernelConfig);
} }
env2.kernels = tf25.getKernelsForBackend(tf25.getBackend()).map((kernel) => kernel.kernelName.toLowerCase()); env.kernels = tf25.getKernelsForBackend(tf25.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
} }
// src/util/draw.ts // src/util/draw.ts
@ -12647,8 +12668,8 @@ async function warmupCanvas(instance) {
let img; let img;
if (typeof Image !== "undefined") if (typeof Image !== "undefined")
img = new Image(); img = new Image();
else if (env2.Image) else if (env.Image)
img = new env2.Image(); img = new env.Image();
img.onload = async () => { img.onload = async () => {
const canvas3 = canvas(img.naturalWidth, img.naturalHeight); const canvas3 = canvas(img.naturalWidth, img.naturalHeight);
if (!canvas3) { if (!canvas3) {
@ -12702,7 +12723,7 @@ async function warmup(instance, userConfig) {
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
if (typeof createImageBitmap === "function") if (typeof createImageBitmap === "function")
res = await warmupBitmap(instance); res = await warmupBitmap(instance);
else if (typeof Image !== "undefined" || env2.Canvas !== void 0) else if (typeof Image !== "undefined" || env.Canvas !== void 0)
res = await warmupCanvas(instance); res = await warmupCanvas(instance);
else else
res = await warmupNode(instance); res = await warmupNode(instance);
@ -12767,11 +12788,10 @@ var Human = class {
if (this.events && this.events.dispatchEvent) if (this.events && this.events.dispatchEvent)
(_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event)); (_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event));
}); });
get(); this.env = env;
this.env = env2;
config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf28.version_core}/dist/`; config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf28.version_core}/dist/`;
config.modelBasePath = this.env.browser ? "../models/" : "file://models/"; config.modelBasePath = env.browser ? "../models/" : "file://models/";
config.backend = this.env.browser ? "humangl" : "tensorflow"; config.backend = env.browser ? "humangl" : "tensorflow";
this.version = version; this.version = version;
Object.defineProperty(this, "version", { value: version }); Object.defineProperty(this, "version", { value: version });
this.config = JSON.parse(JSON.stringify(config)); this.config = JSON.parse(JSON.stringify(config));
@ -12827,7 +12847,6 @@ var Human = class {
async init() { async init() {
await check(this, true); await check(this, true);
await this.tf.ready(); await this.tf.ready();
set(this.env);
} }
async load(userConfig) { async load(userConfig) {
this.state = "load"; this.state = "load";
@ -12835,7 +12854,7 @@ var Human = class {
const count2 = Object.values(this.models).filter((model14) => model14).length; const count2 = Object.values(this.models).filter((model14) => model14).length;
if (userConfig) if (userConfig)
this.config = mergeDeep(this.config, userConfig); this.config = mergeDeep(this.config, userConfig);
if (env2.initial) { if (env.initial) {
if (this.config.debug) if (this.config.debug)
log(`version: ${this.version}`); log(`version: ${this.version}`);
if (this.config.debug) if (this.config.debug)
@ -12851,9 +12870,9 @@ var Human = class {
} }
} }
await load15(this); await load15(this);
if (env2.initial && this.config.debug) if (env.initial && this.config.debug)
log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors"); log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors");
env2.initial = false; env.initial = false;
const loaded = Object.values(this.models).filter((model14) => model14).length; const loaded = Object.values(this.models).filter((model14) => model14).length;
if (loaded !== count2) { if (loaded !== count2) {
await validate2(this); await validate2(this);
@ -13039,6 +13058,7 @@ _checkSanity = new WeakMap();
_sanity = new WeakMap(); _sanity = new WeakMap();
// Annotate the CommonJS export names for ESM import in node: // Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = { 0 && (module.exports = {
Env,
Human, Human,
Models, Models,
defaults, defaults,

View File

@ -83,11 +83,12 @@ var require_tfjs_esm = __commonJS({
// src/human.ts // src/human.ts
__export(exports, { __export(exports, {
Env: () => Env,
Human: () => Human, Human: () => Human,
Models: () => Models, Models: () => Models,
default: () => Human, default: () => Human,
defaults: () => config, defaults: () => config,
env: () => env2 env: () => env
}); });
// src/util/util.ts // src/util/util.ts
@ -254,17 +255,11 @@ var config = {
} }
}; };
// src/human.ts // src/util/env.ts
var tf28 = __toModule(require_tfjs_esm()); var tf2 = __toModule(require_tfjs_esm());
// package.json
var version = "2.3.5";
// src/tfjs/humangl.ts
var tf24 = __toModule(require_tfjs_esm());
// src/image/image.ts // src/image/image.ts
var tf2 = __toModule(require_tfjs_esm()); var tf = __toModule(require_tfjs_esm());
// src/image/imagefxshaders.ts // src/image/imagefxshaders.ts
var vertexIdentity = ` var vertexIdentity = `
@ -407,7 +402,7 @@ var GLProgram = class {
this.uniform[u] = this.gl.getUniformLocation(this.id, u); this.uniform[u] = this.gl.getUniformLocation(this.id, u);
} }
}; };
function GLImageFilter(params = {}) { function GLImageFilter() {
let drawCount = 0; let drawCount = 0;
let sourceTexture = null; let sourceTexture = null;
let lastInChain = false; let lastInChain = false;
@ -416,17 +411,17 @@ function GLImageFilter(params = {}) {
let filterChain = []; let filterChain = [];
let vertexBuffer = null; let vertexBuffer = null;
let currentProgram = null; let currentProgram = null;
const canvas3 = params["canvas"] || typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(100, 100) : document.createElement("canvas"); const fxcanvas = canvas(100, 100);
const shaderProgramCache = {}; const shaderProgramCache = {};
const DRAW = { INTERMEDIATE: 1 }; const DRAW = { INTERMEDIATE: 1 };
const gl = canvas3.getContext("webgl"); const gl = fxcanvas.getContext("webgl");
if (!gl) if (!gl)
throw new Error("filter: cannot get webgl context"); throw new Error("filter: cannot get webgl context");
function resize(width, height) { function resize(width, height) {
if (width === canvas3.width && height === canvas3.height) if (width === fxcanvas.width && height === fxcanvas.height)
return; return;
canvas3.width = width; fxcanvas.width = width;
canvas3.height = height; fxcanvas.height = height;
if (!vertexBuffer) { if (!vertexBuffer) {
const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]); const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]);
vertexBuffer = gl.createBuffer(); vertexBuffer = gl.createBuffer();
@ -434,7 +429,7 @@ function GLImageFilter(params = {}) {
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW); gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true); gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
} }
gl.viewport(0, 0, canvas3.width, canvas3.height); gl.viewport(0, 0, fxcanvas.width, fxcanvas.height);
tempFramebuffers = [null, null]; tempFramebuffers = [null, null];
} }
function createFramebufferTexture(width, height) { function createFramebufferTexture(width, height) {
@ -455,7 +450,7 @@ function GLImageFilter(params = {}) {
return { fbo, texture }; return { fbo, texture };
} }
function getTempFramebuffer(index) { function getTempFramebuffer(index) {
tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(canvas3.width, canvas3.height); tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(fxcanvas.width, fxcanvas.height);
return tempFramebuffers[index]; return tempFramebuffers[index];
} }
function draw2(flags = 0) { function draw2(flags = 0) {
@ -817,8 +812,8 @@ function GLImageFilter(params = {}) {
}, },
convolution: (matrix) => { convolution: (matrix) => {
const m = new Float32Array(matrix); const m = new Float32Array(matrix);
const pixelSizeX = 1 / canvas3.width; const pixelSizeX = 1 / fxcanvas.width;
const pixelSizeY = 1 / canvas3.height; const pixelSizeY = 1 / fxcanvas.height;
const program = compileShader(convolution); const program = compileShader(convolution);
gl.uniform1fv(program == null ? void 0 : program.uniform["m"], m); gl.uniform1fv(program == null ? void 0 : program.uniform["m"], m);
gl.uniform2f(program == null ? void 0 : program.uniform["px"], pixelSizeX, pixelSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["px"], pixelSizeX, pixelSizeY);
@ -892,8 +887,8 @@ function GLImageFilter(params = {}) {
]); ]);
}, },
blur: (size2) => { blur: (size2) => {
const blurSizeX = size2 / 7 / canvas3.width; const blurSizeX = size2 / 7 / fxcanvas.width;
const blurSizeY = size2 / 7 / canvas3.height; const blurSizeY = size2 / 7 / fxcanvas.height;
const program = compileShader(blur); const program = compileShader(blur);
gl.uniform2f(program == null ? void 0 : program.uniform["px"], 0, blurSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["px"], 0, blurSizeY);
draw2(DRAW.INTERMEDIATE); draw2(DRAW.INTERMEDIATE);
@ -901,8 +896,8 @@ function GLImageFilter(params = {}) {
draw2(); draw2();
}, },
pixelate: (size2) => { pixelate: (size2) => {
const blurSizeX = size2 / canvas3.width; const blurSizeX = size2 / fxcanvas.width;
const blurSizeY = size2 / canvas3.height; const blurSizeY = size2 / fxcanvas.height;
const program = compileShader(pixelate); const program = compileShader(pixelate);
gl.uniform2f(program == null ? void 0 : program.uniform["size"], blurSizeX, blurSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["size"], blurSizeX, blurSizeY);
draw2(); draw2();
@ -935,7 +930,7 @@ function GLImageFilter(params = {}) {
const f = filterChain[i]; const f = filterChain[i];
f.func.apply(this, f.args || []); f.func.apply(this, f.args || []);
} }
return canvas3; return fxcanvas;
}; };
this.draw = function(image25) { this.draw = function(image25) {
this.add("brightness", 0); this.add("brightness", 0);
@ -943,95 +938,6 @@ function GLImageFilter(params = {}) {
}; };
} }
// src/util/env.ts
var tf = __toModule(require_tfjs_esm());
var env2 = {
browser: void 0,
node: void 0,
worker: void 0,
platform: void 0,
agent: void 0,
initial: true,
backends: [],
offscreen: void 0,
filter: void 0,
tfjs: {
version: void 0
},
wasm: {
supported: void 0,
backend: void 0,
simd: void 0,
multithread: void 0
},
webgl: {
supported: void 0,
backend: void 0,
version: void 0,
renderer: void 0
},
webgpu: {
supported: void 0,
backend: void 0,
adapter: void 0
},
kernels: [],
Canvas: void 0,
Image: void 0,
ImageData: void 0
};
async function backendInfo() {
var _a;
env2.backends = Object.keys(tf.engine().registryFactory);
env2.wasm.supported = typeof WebAssembly !== "undefined";
env2.wasm.backend = env2.backends.includes("wasm");
if (env2.wasm.supported && env2.wasm.backend && tf.getBackend() === "wasm") {
env2.wasm.simd = await tf.env().getAsync("WASM_HAS_SIMD_SUPPORT");
env2.wasm.multithread = await tf.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
}
const c = canvas(100, 100);
const ctx = c ? c.getContext("webgl2") : void 0;
env2.webgl.supported = typeof ctx !== "undefined";
env2.webgl.backend = env2.backends.includes("webgl");
if (env2.webgl.supported && env2.webgl.backend && (tf.getBackend() === "webgl" || tf.getBackend() === "humangl")) {
const gl = tf.backend().gpgpu !== "undefined" ? await tf.backend().getGPGPUContext().gl : null;
if (gl) {
env2.webgl.version = gl.getParameter(gl.VERSION);
env2.webgl.renderer = gl.getParameter(gl.RENDERER);
}
}
env2.webgpu.supported = env2.browser && typeof navigator["gpu"] !== "undefined";
env2.webgpu.backend = env2.backends.includes("webgpu");
if (env2.webgpu.supported)
env2.webgpu.adapter = (_a = await navigator["gpu"].requestAdapter()) == null ? void 0 : _a.name;
env2.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
}
async function get() {
env2.browser = typeof navigator !== "undefined";
env2.node = typeof process !== "undefined";
env2.tfjs.version = tf.version_core;
env2.offscreen = typeof env2.offscreen === "undefined" ? typeof OffscreenCanvas !== "undefined" : env2.offscreen;
if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) {
const platformMatch = raw[0].match(/\(([^()]+)\)/g);
env2.platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
env2.agent = navigator.userAgent.replace(raw[0], "");
if (env2.platform[1])
env2.agent = env2.agent.replace(raw[1], "");
env2.agent = env2.agent.replace(/ /g, " ");
}
} else if (typeof process !== "undefined") {
env2.platform = `${process.platform} ${process.arch}`;
env2.agent = `NodeJS ${process.version}`;
}
env2.worker = env2.browser && env2.offscreen ? typeof WorkerGlobalScope !== "undefined" : void 0;
await backendInfo();
}
async function set(obj) {
env2 = mergeDeep(env2, obj);
}
// src/image/image.ts // src/image/image.ts
var maxSize = 2048; var maxSize = 2048;
var inCanvas = null; var inCanvas = null;
@ -1040,8 +946,8 @@ var tmpCanvas = null;
var fx; var fx;
function canvas(width, height) { function canvas(width, height) {
let c; let c;
if (env2.browser) { if (env.browser) {
if (env2.offscreen) { if (env.offscreen) {
c = new OffscreenCanvas(width, height); c = new OffscreenCanvas(width, height);
} else { } else {
if (typeof document === "undefined") if (typeof document === "undefined")
@ -1051,8 +957,8 @@ function canvas(width, height) {
c.height = height; c.height = height;
} }
} else { } else {
if (typeof env2.Canvas !== "undefined") if (typeof env.Canvas !== "undefined")
c = new env2.Canvas(width, height); c = new env.Canvas(width, height);
else if (typeof globalThis.Canvas !== "undefined") else if (typeof globalThis.Canvas !== "undefined")
c = new globalThis.Canvas(width, height); c = new globalThis.Canvas(width, height);
} }
@ -1070,16 +976,16 @@ function process2(input, config3, getTensor = true) {
log("input is missing"); log("input is missing");
return { tensor: null, canvas: null }; return { tensor: null, canvas: null };
} }
if (!(input instanceof tf2.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env2.Canvas !== "undefined" && input instanceof env2.Canvas) && !(typeof globalThis.Canvas !== "undefined" && input instanceof globalThis.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) { if (!(input instanceof tf.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env.Canvas !== "undefined" && input instanceof env.Canvas) && !(typeof globalThis.Canvas !== "undefined" && input instanceof globalThis.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) {
throw new Error("input type is not recognized"); throw new Error("input type is not recognized");
} }
if (input instanceof tf2.Tensor) { if (input instanceof tf.Tensor) {
if (input["isDisposedInternal"]) { if (input["isDisposedInternal"]) {
throw new Error("input tensor is disposed"); throw new Error("input tensor is disposed");
} else if (!input.shape || input.shape.length !== 4 || input.shape[0] !== 1 || input.shape[3] !== 3) { } else if (!input.shape || input.shape.length !== 4 || input.shape[0] !== 1 || input.shape[3] !== 3) {
throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input["shape"]}`); throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input["shape"]}`);
} else { } else {
return { tensor: tf2.clone(input), canvas: config3.filter.return ? outCanvas : null }; return { tensor: tf.clone(input), canvas: config3.filter.return ? outCanvas : null };
} }
} else { } else {
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) { if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
@ -1131,10 +1037,10 @@ function process2(input, config3, getTensor = true) {
} }
if (!outCanvas || inCanvas.width !== outCanvas.width || (inCanvas == null ? void 0 : inCanvas.height) !== (outCanvas == null ? void 0 : outCanvas.height)) if (!outCanvas || inCanvas.width !== outCanvas.width || (inCanvas == null ? void 0 : inCanvas.height) !== (outCanvas == null ? void 0 : outCanvas.height))
outCanvas = canvas(inCanvas.width, inCanvas.height); outCanvas = canvas(inCanvas.width, inCanvas.height);
if (config3.filter.enabled && env2.webgl.supported) { if (config3.filter.enabled && env.webgl.supported) {
if (!fx) if (!fx)
fx = env2.browser ? new GLImageFilter({ canvas: outCanvas }) : null; fx = env.browser ? new GLImageFilter() : null;
env2.filter = !!fx; env.filter = !!fx;
if (!fx) if (!fx)
return { tensor: null, canvas: inCanvas }; return { tensor: null, canvas: inCanvas };
fx.reset(); fx.reset();
@ -1174,7 +1080,7 @@ function process2(input, config3, getTensor = true) {
copy(inCanvas, outCanvas); copy(inCanvas, outCanvas);
if (fx) if (fx)
fx = null; fx = null;
env2.filter = !!fx; env.filter = !!fx;
} }
if (!getTensor) if (!getTensor)
return { tensor: null, canvas: outCanvas }; return { tensor: null, canvas: outCanvas };
@ -1183,22 +1089,22 @@ function process2(input, config3, getTensor = true) {
let pixels; let pixels;
let depth = 3; let depth = 3;
if (typeof ImageData !== "undefined" && input instanceof ImageData || input["data"] && input["width"] && input["height"]) { if (typeof ImageData !== "undefined" && input instanceof ImageData || input["data"] && input["width"] && input["height"]) {
if (env2.browser && tf2.browser) { if (env.browser && tf.browser) {
pixels = tf2.browser ? tf2.browser.fromPixels(input) : null; pixels = tf.browser ? tf.browser.fromPixels(input) : null;
} else { } else {
depth = input["data"].length / input["height"] / input["width"]; depth = input["data"].length / input["height"] / input["width"];
const arr = new Uint8Array(input["data"]["buffer"]); const arr = new Uint8Array(input["data"]["buffer"]);
pixels = tf2.tensor(arr, [input["height"], input["width"], depth], "int32"); pixels = tf.tensor(arr, [input["height"], input["width"], depth], "int32");
} }
} else { } else {
if (!tmpCanvas || outCanvas.width !== tmpCanvas.width || (outCanvas == null ? void 0 : outCanvas.height) !== (tmpCanvas == null ? void 0 : tmpCanvas.height)) if (!tmpCanvas || outCanvas.width !== tmpCanvas.width || (outCanvas == null ? void 0 : outCanvas.height) !== (tmpCanvas == null ? void 0 : tmpCanvas.height))
tmpCanvas = canvas(outCanvas.width, outCanvas.height); tmpCanvas = canvas(outCanvas.width, outCanvas.height);
if (tf2.browser && env2.browser) { if (tf.browser && env.browser) {
if (config3.backend === "webgl" || config3.backend === "humangl" || config3.backend === "webgpu") { if (config3.backend === "webgl" || config3.backend === "humangl" || config3.backend === "webgpu") {
pixels = tf2.browser.fromPixels(outCanvas); pixels = tf.browser.fromPixels(outCanvas);
} else { } else {
tmpCanvas = copy(outCanvas); tmpCanvas = copy(outCanvas);
pixels = tf2.browser.fromPixels(tmpCanvas); pixels = tf.browser.fromPixels(tmpCanvas);
} }
} else { } else {
const tempCanvas = copy(outCanvas); const tempCanvas = copy(outCanvas);
@ -1206,19 +1112,19 @@ function process2(input, config3, getTensor = true) {
const tempData = tempCtx.getImageData(0, 0, targetWidth, targetHeight); const tempData = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
depth = tempData.data.length / targetWidth / targetHeight; depth = tempData.data.length / targetWidth / targetHeight;
const arr = new Uint8Array(tempData.data.buffer); const arr = new Uint8Array(tempData.data.buffer);
pixels = tf2.tensor(arr, [targetWidth, targetHeight, depth]); pixels = tf.tensor(arr, [targetWidth, targetHeight, depth]);
} }
} }
if (depth === 4) { if (depth === 4) {
const rgb2 = tf2.slice3d(pixels, [0, 0, 0], [-1, -1, 3]); const rgb2 = tf.slice3d(pixels, [0, 0, 0], [-1, -1, 3]);
tf2.dispose(pixels); tf.dispose(pixels);
pixels = rgb2; pixels = rgb2;
} }
if (!pixels) if (!pixels)
throw new Error("cannot create tensor from input"); throw new Error("cannot create tensor from input");
const casted = tf2.cast(pixels, "float32"); const casted = tf.cast(pixels, "float32");
const tensor3 = tf2.expandDims(casted, 0); const tensor3 = tf.expandDims(casted, 0);
tf2.dispose([pixels, casted]); tf.dispose([pixels, casted]);
return { tensor: tensor3, canvas: config3.filter.return ? outCanvas : null }; return { tensor: tensor3, canvas: config3.filter.return ? outCanvas : null };
} }
} }
@ -1227,11 +1133,11 @@ var lastCacheDiff = 1;
var benchmarked = 0; var benchmarked = 0;
var checksum = async (input) => { var checksum = async (input) => {
const resizeFact = 48; const resizeFact = 48;
const reduced = tf2.image.resizeBilinear(input, [Math.trunc((input.shape[1] || 1) / resizeFact), Math.trunc((input.shape[2] || 1) / resizeFact)]); const reduced = tf.image.resizeBilinear(input, [Math.trunc((input.shape[1] || 1) / resizeFact), Math.trunc((input.shape[2] || 1) / resizeFact)]);
const tfSum = async () => { const tfSum = async () => {
const sumT = tf2.sum(reduced); const sumT = tf.sum(reduced);
const sum0 = await sumT.data(); const sum0 = await sumT.data();
tf2.dispose(sumT); tf.dispose(sumT);
return sum0[0]; return sum0[0];
}; };
const jsSum = async () => { const jsSum = async () => {
@ -1250,7 +1156,7 @@ var checksum = async (input) => {
benchmarked = t1 - t0 < t2 - t1 ? 1 : 2; benchmarked = t1 - t0 < t2 - t1 ? 1 : 2;
} }
const res = benchmarked === 1 ? await jsSum() : await tfSum(); const res = benchmarked === 1 ? await jsSum() : await tfSum();
tf2.dispose(reduced); tf.dispose(reduced);
return res; return res;
}; };
async function skip(config3, input) { async function skip(config3, input) {
@ -1265,12 +1171,132 @@ async function skip(config3, input) {
return skipFrame; return skipFrame;
} }
// src/util/env.ts
var Env = class {
constructor() {
__publicField(this, "browser");
__publicField(this, "node");
__publicField(this, "worker");
__publicField(this, "platform", "");
__publicField(this, "agent", "");
__publicField(this, "backends", []);
__publicField(this, "initial");
__publicField(this, "filter");
__publicField(this, "tfjs");
__publicField(this, "offscreen");
__publicField(this, "wasm", {
supported: void 0,
backend: void 0,
simd: void 0,
multithread: void 0
});
__publicField(this, "webgl", {
supported: void 0,
backend: void 0,
version: void 0,
renderer: void 0
});
__publicField(this, "webgpu", {
supported: void 0,
backend: void 0,
adapter: void 0
});
__publicField(this, "cpu", {
model: void 0,
flags: []
});
__publicField(this, "kernels", []);
__publicField(this, "Canvas");
__publicField(this, "Image");
__publicField(this, "ImageData");
this.browser = typeof navigator !== "undefined";
this.node = typeof process !== "undefined";
this.tfjs = { version: tf2.version_core };
this.offscreen = typeof OffscreenCanvas !== "undefined";
this.initial = true;
this.worker = this.browser && this.offscreen ? typeof WorkerGlobalScope !== "undefined" : void 0;
if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) {
const platformMatch = raw[0].match(/\(([^()]+)\)/g);
this.platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
this.agent = navigator.userAgent.replace(raw[0], "");
if (this.platform[1])
this.agent = this.agent.replace(raw[1], "");
this.agent = this.agent.replace(/ /g, " ");
}
} else if (typeof process !== "undefined") {
this.platform = `${process.platform} ${process.arch}`;
this.agent = `NodeJS ${process.version}`;
}
}
async updateBackend() {
var _a;
this.backends = Object.keys(tf2.engine().registryFactory);
this.wasm.supported = typeof WebAssembly !== "undefined";
this.wasm.backend = this.backends.includes("wasm");
if (this.wasm.supported && this.wasm.backend && tf2.getBackend() === "wasm") {
this.wasm.simd = await tf2.env().getAsync("WASM_HAS_SIMD_SUPPORT");
this.wasm.multithread = await tf2.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
}
const c = canvas(100, 100);
const ctx = c ? c.getContext("webgl2") : void 0;
this.webgl.supported = typeof ctx !== "undefined";
this.webgl.backend = this.backends.includes("webgl");
if (this.webgl.supported && this.webgl.backend && (tf2.getBackend() === "webgl" || tf2.getBackend() === "humangl")) {
const gl = tf2.backend().gpgpu !== "undefined" ? await tf2.backend().getGPGPUContext().gl : null;
if (gl) {
this.webgl.version = gl.getParameter(gl.VERSION);
this.webgl.renderer = gl.getParameter(gl.RENDERER);
}
}
this.webgpu.supported = this.browser && typeof navigator["gpu"] !== "undefined";
this.webgpu.backend = this.backends.includes("webgpu");
if (this.webgpu.supported)
this.webgpu.adapter = (_a = await navigator["gpu"].requestAdapter()) == null ? void 0 : _a.name;
this.kernels = tf2.getKernelsForBackend(tf2.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
}
async updateCPU() {
var _a;
const cpu = { model: "", flags: [] };
if (this.node && ((_a = this.platform) == null ? void 0 : _a.startsWith("linux"))) {
const fs = require("fs");
try {
const data = fs.readFileSync("/proc/cpuinfo").toString();
for (const line of data.split("\n")) {
if (line.startsWith("model name")) {
cpu.model = line.match(/:(.*)/g)[0].replace(":", "").trim();
}
if (line.startsWith("flags")) {
cpu.flags = line.match(/:(.*)/g)[0].replace(":", "").trim().split(" ").sort();
}
}
} catch (e) {
}
}
if (!this["cpu"])
Object.defineProperty(this, "cpu", { value: cpu });
else
this["cpu"] = cpu;
}
};
var env = new Env();
// src/human.ts
var tf28 = __toModule(require_tfjs_esm());
// package.json
var version = "2.3.5";
// src/tfjs/humangl.ts
var tf24 = __toModule(require_tfjs_esm());
// src/gear/gear-agegenderrace.ts // src/gear/gear-agegenderrace.ts
var tf3 = __toModule(require_tfjs_esm()); var tf3 = __toModule(require_tfjs_esm());
var model; var model;
var skipped = Number.MAX_SAFE_INTEGER; var skipped = Number.MAX_SAFE_INTEGER;
async function load(config3) { async function load(config3) {
if (env2.initial) if (env.initial)
model = null; model = null;
if (!model) { if (!model) {
model = await tf3.loadGraphModel(join(config3.modelBasePath, config3.face.agegenderrace.modelPath)); model = await tf3.loadGraphModel(join(config3.modelBasePath, config3.face.agegenderrace.modelPath));
@ -1291,7 +1317,7 @@ var skipped2 = Number.MAX_SAFE_INTEGER;
var lastCount = 0; var lastCount = 0;
async function load2(config3) { async function load2(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model2 = null; model2 = null;
if (!model2) { if (!model2) {
model2 = await tf4.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.antispoof) == null ? void 0 : _a.modelPath) || "")); model2 = await tf4.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.antispoof) == null ? void 0 : _a.modelPath) || ""));
@ -4756,7 +4782,7 @@ var inputSize = 0;
var size = () => inputSize; var size = () => inputSize;
async function load3(config3) { async function load3(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model3 = null; model3 = null;
if (!model3) { if (!model3) {
model3 = await tf6.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.detector) == null ? void 0 : _a.modelPath) || "")); model3 = await tf6.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.detector) == null ? void 0 : _a.modelPath) || ""));
@ -5122,7 +5148,7 @@ var inputSize3 = 0;
var last = []; var last = [];
var skipped4 = Number.MAX_SAFE_INTEGER; var skipped4 = Number.MAX_SAFE_INTEGER;
async function load4(config3) { async function load4(config3) {
if (env2.initial) if (env.initial)
model4 = null; model4 = null;
if (!model4) { if (!model4) {
fakeOps(["floormod"], config3); fakeOps(["floormod"], config3);
@ -5189,7 +5215,7 @@ async function predict3(input, config3) {
return last; return last;
} }
skipped4 = 0; skipped4 = 0;
if (!env2.kernels.includes("mod") || !env2.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last; return last;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [input.shape[2], input.shape[1]]; const outputSize2 = [input.shape[2], input.shape[1]];
@ -5243,7 +5269,7 @@ var model5;
var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} }; var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} };
var skipped5 = Number.MAX_SAFE_INTEGER; var skipped5 = Number.MAX_SAFE_INTEGER;
async function load5(config3) { async function load5(config3) {
if (env2.initial) if (env.initial)
model5 = null; model5 = null;
if (!model5) { if (!model5) {
model5 = await tf9.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || "")); model5 = await tf9.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || ""));
@ -5357,7 +5383,7 @@ var skipped6 = Number.MAX_SAFE_INTEGER;
var rgb = [0.2989, 0.587, 0.114]; var rgb = [0.2989, 0.587, 0.114];
async function load6(config3) { async function load6(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model6 = null; model6 = null;
if (!model6) { if (!model6) {
model6 = await tf10.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.emotion) == null ? void 0 : _a.modelPath) || "")); model6 = await tf10.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.emotion) == null ? void 0 : _a.modelPath) || ""));
@ -5435,7 +5461,7 @@ var irisLandmarks = {
}; };
async function load7(config3) { async function load7(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model7 = null; model7 = null;
if (!model7) { if (!model7) {
model7 = await tf11.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.iris) == null ? void 0 : _a.modelPath) || "")); model7 = await tf11.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.iris) == null ? void 0 : _a.modelPath) || ""));
@ -5480,7 +5506,7 @@ var getEyeBox = (rawCoords, face5, eyeInnerCornerIndex, eyeOuterCornerIndex, fli
box4.endPoint[1] / meshSize, box4.endPoint[1] / meshSize,
box4.endPoint[0] / meshSize box4.endPoint[0] / meshSize
]], [0], [inputSize4, inputSize4]); ]], [0], [inputSize4, inputSize4]);
if (flip && env2.kernels.includes("flipleftright")) { if (flip && env.kernels.includes("flipleftright")) {
const flipped = tf11.image.flipLeftRight(crop2); const flipped = tf11.image.flipLeftRight(crop2);
tf11.dispose(crop2); tf11.dispose(crop2);
crop2 = flipped; crop2 = flipped;
@ -5594,7 +5620,7 @@ async function predict6(input, config3) {
faceScore: 0, faceScore: 0,
annotations: {} annotations: {}
}; };
if (((_d = config3.face.detector) == null ? void 0 : _d.rotation) && ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) && env2.kernels.includes("rotatewithoffset")) { if (((_d = config3.face.detector) == null ? void 0 : _d.rotation) && ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) && env.kernels.includes("rotatewithoffset")) {
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} else { } else {
rotationMatrix = IDENTITY_MATRIX; rotationMatrix = IDENTITY_MATRIX;
@ -5636,7 +5662,7 @@ async function predict6(input, config3) {
box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence }; box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence };
for (const key of Object.keys(meshAnnotations)) for (const key of Object.keys(meshAnnotations))
face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]); face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]);
if (((_j = config3.face.detector) == null ? void 0 : _j.rotation) && config3.face.mesh.enabled && ((_k = config3.face.description) == null ? void 0 : _k.enabled) && env2.kernels.includes("rotatewithoffset")) { if (((_j = config3.face.detector) == null ? void 0 : _j.rotation) && config3.face.mesh.enabled && ((_k = config3.face.description) == null ? void 0 : _k.enabled) && env.kernels.includes("rotatewithoffset")) {
tf12.dispose(face5.tensor); tf12.dispose(face5.tensor);
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} }
@ -5660,7 +5686,7 @@ async function predict6(input, config3) {
} }
async function load8(config3) { async function load8(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model8 = null; model8 = null;
if (!model8) { if (!model8) {
model8 = await tf12.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.mesh) == null ? void 0 : _a.modelPath) || "")); model8 = await tf12.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.mesh) == null ? void 0 : _a.modelPath) || ""));
@ -5687,7 +5713,7 @@ var skipped8 = Number.MAX_SAFE_INTEGER;
async function load9(config3) { async function load9(config3) {
var _a, _b; var _a, _b;
const modelUrl = join(config3.modelBasePath, ((_a = config3.face.description) == null ? void 0 : _a.modelPath) || ""); const modelUrl = join(config3.modelBasePath, ((_a = config3.face.description) == null ? void 0 : _a.modelPath) || "");
if (env2.initial) if (env.initial)
model9 = null; model9 = null;
if (!model9) { if (!model9) {
model9 = await tf13.loadGraphModel(modelUrl); model9 = await tf13.loadGraphModel(modelUrl);
@ -8992,7 +9018,7 @@ var HandPipeline = class {
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0; const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
const palmCenter = getBoxCenter2(currentBox); const palmCenter = getBoxCenter2(currentBox);
const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]]; const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]];
const rotatedImage = config3.hand.rotation && env2.kernels.includes("rotatewithoffset") ? tf16.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone(); const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf16.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone();
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter); const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox; const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]); const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@ -9494,7 +9520,7 @@ async function predict8(input, config3) {
} }
async function load10(config3) { async function load10(config3) {
var _a, _b, _c, _d, _e, _f; var _a, _b, _c, _d, _e, _f;
if (env2.initial) { if (env.initial) {
handDetectorModel = null; handDetectorModel = null;
handPoseModel = null; handPoseModel = null;
} }
@ -9584,7 +9610,7 @@ var fingerMap = {
}; };
async function loadDetect2(config3) { async function loadDetect2(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
models2[0] = null; models2[0] = null;
if (!models2[0]) { if (!models2[0]) {
fakeOps(["tensorlistreserve", "enter", "tensorlistfromtensor", "merge", "loopcond", "switch", "exit", "tensorliststack", "nextiteration", "tensorlistsetitem", "tensorlistgetitem", "reciprocal", "shape", "split", "where"], config3); fakeOps(["tensorlistreserve", "enter", "tensorlistfromtensor", "merge", "loopcond", "switch", "exit", "tensorliststack", "nextiteration", "tensorlistsetitem", "tensorlistgetitem", "reciprocal", "shape", "split", "where"], config3);
@ -9602,7 +9628,7 @@ async function loadDetect2(config3) {
} }
async function loadSkeleton(config3) { async function loadSkeleton(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
models2[1] = null; models2[1] = null;
if (!models2[1]) { if (!models2[1]) {
models2[1] = await tf18.loadGraphModel(join(config3.modelBasePath, ((_a = config3.hand.skeleton) == null ? void 0 : _a.modelPath) || "")); models2[1] = await tf18.loadGraphModel(join(config3.modelBasePath, ((_a = config3.hand.skeleton) == null ? void 0 : _a.modelPath) || ""));
@ -9911,7 +9937,7 @@ var cache5 = {
bodies: [] bodies: []
}; };
async function load11(config3) { async function load11(config3) {
if (env2.initial) if (env.initial)
model10 = null; model10 = null;
if (!model10) { if (!model10) {
fakeOps(["size"], config3); fakeOps(["size"], config3);
@ -10044,7 +10070,7 @@ var last4 = [];
var skipped11 = Number.MAX_SAFE_INTEGER; var skipped11 = Number.MAX_SAFE_INTEGER;
var scaleBox = 2.5; var scaleBox = 2.5;
async function load12(config3) { async function load12(config3) {
if (!model11 || env2.initial) { if (!model11 || env.initial) {
model11 = await tf21.loadGraphModel(join(config3.modelBasePath, config3.object.modelPath || "")); model11 = await tf21.loadGraphModel(join(config3.modelBasePath, config3.object.modelPath || ""));
const inputs = Object.values(model11.modelSignature["inputs"]); const inputs = Object.values(model11.modelSignature["inputs"]);
model11.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null; model11.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null;
@ -10125,7 +10151,7 @@ async function predict11(image25, config3) {
return last4; return last4;
} }
skipped11 = 0; skipped11 = 0;
if (!env2.kernels.includes("mod") || !env2.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last4; return last4;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [image25.shape[2], image25.shape[1]]; const outputSize2 = [image25.shape[2], image25.shape[1]];
@ -10476,7 +10502,7 @@ async function predict12(input, config3) {
return scaled; return scaled;
} }
async function load13(config3) { async function load13(config3) {
if (!model12 || env2.initial) { if (!model12 || env.initial) {
model12 = await tf22.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || "")); model12 = await tf22.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || ""));
if (!model12 || !model12["modelUrl"]) if (!model12 || !model12["modelUrl"])
log("load model failed:", config3.body.modelPath); log("load model failed:", config3.body.modelPath);
@ -10492,7 +10518,7 @@ var tf23 = __toModule(require_tfjs_esm());
var model13; var model13;
var busy = false; var busy = false;
async function load14(config3) { async function load14(config3) {
if (!model13 || env2.initial) { if (!model13 || env.initial) {
model13 = await tf23.loadGraphModel(join(config3.modelBasePath, config3.segmentation.modelPath || "")); model13 = await tf23.loadGraphModel(join(config3.modelBasePath, config3.segmentation.modelPath || ""));
if (!model13 || !model13["modelUrl"]) if (!model13 || !model13["modelUrl"])
log("load model failed:", config3.segmentation.modelPath); log("load model failed:", config3.segmentation.modelPath);
@ -10531,7 +10557,7 @@ async function process5(input, background, config3) {
t.data = tf23.image.resizeBilinear(t.squeeze, [height, width]); t.data = tf23.image.resizeBilinear(t.squeeze, [height, width]);
} }
const data = Array.from(await t.data.data()); const data = Array.from(await t.data.data());
if (env2.node && !env2.Canvas && typeof ImageData === "undefined") { if (env.node && !env.Canvas && typeof ImageData === "undefined") {
if (config3.debug) if (config3.debug)
log("canvas support missing"); log("canvas support missing");
Object.keys(t).forEach((tensor3) => tf23.dispose(t[tensor3])); Object.keys(t).forEach((tensor3) => tf23.dispose(t[tensor3]));
@ -10603,7 +10629,7 @@ function reset(instance) {
} }
async function load15(instance) { async function load15(instance) {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E;
if (env2.initial) if (env.initial)
reset(instance); reset(instance);
if (instance.config.hand.enabled) { if (instance.config.hand.enabled) {
if (!instance.models.handpose && ((_b = (_a = instance.config.hand.detector) == null ? void 0 : _a.modelPath) == null ? void 0 : _b.includes("handdetect"))) if (!instance.models.handpose && ((_b = (_a = instance.config.hand.detector) == null ? void 0 : _a.modelPath) == null ? void 0 : _b.includes("handdetect")))
@ -10797,24 +10823,24 @@ async function register(instance) {
var tf25 = __toModule(require_tfjs_esm()); var tf25 = __toModule(require_tfjs_esm());
async function check(instance, force = false) { async function check(instance, force = false) {
instance.state = "backend"; instance.state = "backend";
if (force || env2.initial || instance.config.backend && instance.config.backend.length > 0 && tf25.getBackend() !== instance.config.backend) { if (force || env.initial || instance.config.backend && instance.config.backend.length > 0 && tf25.getBackend() !== instance.config.backend) {
const timeStamp = now(); const timeStamp = now();
if (instance.config.backend && instance.config.backend.length > 0) { if (instance.config.backend && instance.config.backend.length > 0) {
if (typeof window === "undefined" && typeof WorkerGlobalScope !== "undefined" && instance.config.debug) { if (typeof window === "undefined" && typeof WorkerGlobalScope !== "undefined" && instance.config.debug) {
if (instance.config.debug) if (instance.config.debug)
log("running inside web worker"); log("running inside web worker");
} }
if (env2.browser && instance.config.backend === "tensorflow") { if (env.browser && instance.config.backend === "tensorflow") {
if (instance.config.debug) if (instance.config.debug)
log("override: backend set to tensorflow while running in browser"); log("override: backend set to tensorflow while running in browser");
instance.config.backend = "humangl"; instance.config.backend = "humangl";
} }
if (env2.node && (instance.config.backend === "webgl" || instance.config.backend === "humangl")) { if (env.node && (instance.config.backend === "webgl" || instance.config.backend === "humangl")) {
if (instance.config.debug) if (instance.config.debug)
log(`override: backend set to ${instance.config.backend} while running in nodejs`); log(`override: backend set to ${instance.config.backend} while running in nodejs`);
instance.config.backend = "tensorflow"; instance.config.backend = "tensorflow";
} }
if (env2.browser && instance.config.backend === "webgpu") { if (env.browser && instance.config.backend === "webgpu") {
if (typeof navigator === "undefined" || typeof navigator["gpu"] === "undefined") { if (typeof navigator === "undefined" || typeof navigator["gpu"] === "undefined") {
log("override: backend set to webgpu but browser does not support webgpu"); log("override: backend set to webgpu but browser does not support webgpu");
instance.config.backend = "humangl"; instance.config.backend = "humangl";
@ -10831,7 +10857,7 @@ async function check(instance, force = false) {
log("available backends:", available); log("available backends:", available);
if (!available.includes(instance.config.backend)) { if (!available.includes(instance.config.backend)) {
log(`error: backend ${instance.config.backend} not found in registry`); log(`error: backend ${instance.config.backend} not found in registry`);
instance.config.backend = env2.node ? "tensorflow" : "webgl"; instance.config.backend = env.node ? "tensorflow" : "webgl";
if (instance.config.debug) if (instance.config.debug)
log(`override: setting backend ${instance.config.backend}`); log(`override: setting backend ${instance.config.backend}`);
} }
@ -10862,7 +10888,6 @@ async function check(instance, force = false) {
if (tf25.getBackend() === "humangl") { if (tf25.getBackend() === "humangl") {
tf25.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false); tf25.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
tf25.ENV.set("WEBGL_CPU_FORWARD", true); tf25.ENV.set("WEBGL_CPU_FORWARD", true);
tf25.ENV.set("WEBGL_PACK_DEPTHWISECONV", false);
tf25.ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true); tf25.ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true);
tf25.ENV.set("CPU_HANDOFF_SIZE_THRESHOLD", 256); tf25.ENV.set("CPU_HANDOFF_SIZE_THRESHOLD", 256);
if (typeof instance.config["deallocate"] !== "undefined" && instance.config["deallocate"]) { if (typeof instance.config["deallocate"] !== "undefined" && instance.config["deallocate"]) {
@ -10876,16 +10901,12 @@ async function check(instance, force = false) {
} }
} }
if (tf25.getBackend() === "webgpu") { if (tf25.getBackend() === "webgpu") {
tf25.ENV.set("WEBGPU_CPU_HANDOFF_SIZE_THRESHOLD", 512);
tf25.ENV.set("WEBGPU_DEFERRED_SUBMIT_BATCH_SIZE", 0);
tf25.ENV.set("WEBGPU_CPU_FORWARD", true);
} }
tf25.enableProdMode(); tf25.enableProdMode();
await tf25.ready(); await tf25.ready();
instance.performance.backend = Math.trunc(now() - timeStamp); instance.performance.backend = Math.trunc(now() - timeStamp);
instance.config.backend = tf25.getBackend(); instance.config.backend = tf25.getBackend();
get(); env.updateBackend();
instance.env = env2;
} }
return true; return true;
} }
@ -10901,7 +10922,7 @@ function fakeOps(kernelNames, config3) {
}; };
tf25.registerKernel(kernelConfig); tf25.registerKernel(kernelConfig);
} }
env2.kernels = tf25.getKernelsForBackend(tf25.getBackend()).map((kernel) => kernel.kernelName.toLowerCase()); env.kernels = tf25.getKernelsForBackend(tf25.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
} }
// src/util/draw.ts // src/util/draw.ts
@ -12648,8 +12669,8 @@ async function warmupCanvas(instance) {
let img; let img;
if (typeof Image !== "undefined") if (typeof Image !== "undefined")
img = new Image(); img = new Image();
else if (env2.Image) else if (env.Image)
img = new env2.Image(); img = new env.Image();
img.onload = async () => { img.onload = async () => {
const canvas3 = canvas(img.naturalWidth, img.naturalHeight); const canvas3 = canvas(img.naturalWidth, img.naturalHeight);
if (!canvas3) { if (!canvas3) {
@ -12703,7 +12724,7 @@ async function warmup(instance, userConfig) {
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
if (typeof createImageBitmap === "function") if (typeof createImageBitmap === "function")
res = await warmupBitmap(instance); res = await warmupBitmap(instance);
else if (typeof Image !== "undefined" || env2.Canvas !== void 0) else if (typeof Image !== "undefined" || env.Canvas !== void 0)
res = await warmupCanvas(instance); res = await warmupCanvas(instance);
else else
res = await warmupNode(instance); res = await warmupNode(instance);
@ -12768,11 +12789,10 @@ var Human = class {
if (this.events && this.events.dispatchEvent) if (this.events && this.events.dispatchEvent)
(_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event)); (_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event));
}); });
get(); this.env = env;
this.env = env2;
config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf28.version_core}/dist/`; config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf28.version_core}/dist/`;
config.modelBasePath = this.env.browser ? "../models/" : "file://models/"; config.modelBasePath = env.browser ? "../models/" : "file://models/";
config.backend = this.env.browser ? "humangl" : "tensorflow"; config.backend = env.browser ? "humangl" : "tensorflow";
this.version = version; this.version = version;
Object.defineProperty(this, "version", { value: version }); Object.defineProperty(this, "version", { value: version });
this.config = JSON.parse(JSON.stringify(config)); this.config = JSON.parse(JSON.stringify(config));
@ -12828,7 +12848,6 @@ var Human = class {
async init() { async init() {
await check(this, true); await check(this, true);
await this.tf.ready(); await this.tf.ready();
set(this.env);
} }
async load(userConfig) { async load(userConfig) {
this.state = "load"; this.state = "load";
@ -12836,7 +12855,7 @@ var Human = class {
const count2 = Object.values(this.models).filter((model14) => model14).length; const count2 = Object.values(this.models).filter((model14) => model14).length;
if (userConfig) if (userConfig)
this.config = mergeDeep(this.config, userConfig); this.config = mergeDeep(this.config, userConfig);
if (env2.initial) { if (env.initial) {
if (this.config.debug) if (this.config.debug)
log(`version: ${this.version}`); log(`version: ${this.version}`);
if (this.config.debug) if (this.config.debug)
@ -12852,9 +12871,9 @@ var Human = class {
} }
} }
await load15(this); await load15(this);
if (env2.initial && this.config.debug) if (env.initial && this.config.debug)
log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors"); log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors");
env2.initial = false; env.initial = false;
const loaded = Object.values(this.models).filter((model14) => model14).length; const loaded = Object.values(this.models).filter((model14) => model14).length;
if (loaded !== count2) { if (loaded !== count2) {
await validate2(this); await validate2(this);
@ -13040,6 +13059,7 @@ _checkSanity = new WeakMap();
_sanity = new WeakMap(); _sanity = new WeakMap();
// Annotate the CommonJS export names for ESM import in node: // Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = { 0 && (module.exports = {
Env,
Human, Human,
Models, Models,
defaults, defaults,

396
dist/human.node.js vendored
View File

@ -82,11 +82,12 @@ var require_tfjs_esm = __commonJS({
// src/human.ts // src/human.ts
__export(exports, { __export(exports, {
Env: () => Env,
Human: () => Human, Human: () => Human,
Models: () => Models, Models: () => Models,
default: () => Human, default: () => Human,
defaults: () => config, defaults: () => config,
env: () => env2 env: () => env
}); });
// src/util/util.ts // src/util/util.ts
@ -253,17 +254,11 @@ var config = {
} }
}; };
// src/human.ts // src/util/env.ts
var tf28 = __toModule(require_tfjs_esm()); var tf2 = __toModule(require_tfjs_esm());
// package.json
var version = "2.3.5";
// src/tfjs/humangl.ts
var tf24 = __toModule(require_tfjs_esm());
// src/image/image.ts // src/image/image.ts
var tf2 = __toModule(require_tfjs_esm()); var tf = __toModule(require_tfjs_esm());
// src/image/imagefxshaders.ts // src/image/imagefxshaders.ts
var vertexIdentity = ` var vertexIdentity = `
@ -406,7 +401,7 @@ var GLProgram = class {
this.uniform[u] = this.gl.getUniformLocation(this.id, u); this.uniform[u] = this.gl.getUniformLocation(this.id, u);
} }
}; };
function GLImageFilter(params = {}) { function GLImageFilter() {
let drawCount = 0; let drawCount = 0;
let sourceTexture = null; let sourceTexture = null;
let lastInChain = false; let lastInChain = false;
@ -415,17 +410,17 @@ function GLImageFilter(params = {}) {
let filterChain = []; let filterChain = [];
let vertexBuffer = null; let vertexBuffer = null;
let currentProgram = null; let currentProgram = null;
const canvas3 = params["canvas"] || typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(100, 100) : document.createElement("canvas"); const fxcanvas = canvas(100, 100);
const shaderProgramCache = {}; const shaderProgramCache = {};
const DRAW = { INTERMEDIATE: 1 }; const DRAW = { INTERMEDIATE: 1 };
const gl = canvas3.getContext("webgl"); const gl = fxcanvas.getContext("webgl");
if (!gl) if (!gl)
throw new Error("filter: cannot get webgl context"); throw new Error("filter: cannot get webgl context");
function resize(width, height) { function resize(width, height) {
if (width === canvas3.width && height === canvas3.height) if (width === fxcanvas.width && height === fxcanvas.height)
return; return;
canvas3.width = width; fxcanvas.width = width;
canvas3.height = height; fxcanvas.height = height;
if (!vertexBuffer) { if (!vertexBuffer) {
const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]); const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]);
vertexBuffer = gl.createBuffer(); vertexBuffer = gl.createBuffer();
@ -433,7 +428,7 @@ function GLImageFilter(params = {}) {
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW); gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true); gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
} }
gl.viewport(0, 0, canvas3.width, canvas3.height); gl.viewport(0, 0, fxcanvas.width, fxcanvas.height);
tempFramebuffers = [null, null]; tempFramebuffers = [null, null];
} }
function createFramebufferTexture(width, height) { function createFramebufferTexture(width, height) {
@ -454,7 +449,7 @@ function GLImageFilter(params = {}) {
return { fbo, texture }; return { fbo, texture };
} }
function getTempFramebuffer(index) { function getTempFramebuffer(index) {
tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(canvas3.width, canvas3.height); tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(fxcanvas.width, fxcanvas.height);
return tempFramebuffers[index]; return tempFramebuffers[index];
} }
function draw2(flags = 0) { function draw2(flags = 0) {
@ -816,8 +811,8 @@ function GLImageFilter(params = {}) {
}, },
convolution: (matrix) => { convolution: (matrix) => {
const m = new Float32Array(matrix); const m = new Float32Array(matrix);
const pixelSizeX = 1 / canvas3.width; const pixelSizeX = 1 / fxcanvas.width;
const pixelSizeY = 1 / canvas3.height; const pixelSizeY = 1 / fxcanvas.height;
const program = compileShader(convolution); const program = compileShader(convolution);
gl.uniform1fv(program == null ? void 0 : program.uniform["m"], m); gl.uniform1fv(program == null ? void 0 : program.uniform["m"], m);
gl.uniform2f(program == null ? void 0 : program.uniform["px"], pixelSizeX, pixelSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["px"], pixelSizeX, pixelSizeY);
@ -891,8 +886,8 @@ function GLImageFilter(params = {}) {
]); ]);
}, },
blur: (size2) => { blur: (size2) => {
const blurSizeX = size2 / 7 / canvas3.width; const blurSizeX = size2 / 7 / fxcanvas.width;
const blurSizeY = size2 / 7 / canvas3.height; const blurSizeY = size2 / 7 / fxcanvas.height;
const program = compileShader(blur); const program = compileShader(blur);
gl.uniform2f(program == null ? void 0 : program.uniform["px"], 0, blurSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["px"], 0, blurSizeY);
draw2(DRAW.INTERMEDIATE); draw2(DRAW.INTERMEDIATE);
@ -900,8 +895,8 @@ function GLImageFilter(params = {}) {
draw2(); draw2();
}, },
pixelate: (size2) => { pixelate: (size2) => {
const blurSizeX = size2 / canvas3.width; const blurSizeX = size2 / fxcanvas.width;
const blurSizeY = size2 / canvas3.height; const blurSizeY = size2 / fxcanvas.height;
const program = compileShader(pixelate); const program = compileShader(pixelate);
gl.uniform2f(program == null ? void 0 : program.uniform["size"], blurSizeX, blurSizeY); gl.uniform2f(program == null ? void 0 : program.uniform["size"], blurSizeX, blurSizeY);
draw2(); draw2();
@ -934,7 +929,7 @@ function GLImageFilter(params = {}) {
const f = filterChain[i]; const f = filterChain[i];
f.func.apply(this, f.args || []); f.func.apply(this, f.args || []);
} }
return canvas3; return fxcanvas;
}; };
this.draw = function(image25) { this.draw = function(image25) {
this.add("brightness", 0); this.add("brightness", 0);
@ -942,95 +937,6 @@ function GLImageFilter(params = {}) {
}; };
} }
// src/util/env.ts
var tf = __toModule(require_tfjs_esm());
var env2 = {
browser: void 0,
node: void 0,
worker: void 0,
platform: void 0,
agent: void 0,
initial: true,
backends: [],
offscreen: void 0,
filter: void 0,
tfjs: {
version: void 0
},
wasm: {
supported: void 0,
backend: void 0,
simd: void 0,
multithread: void 0
},
webgl: {
supported: void 0,
backend: void 0,
version: void 0,
renderer: void 0
},
webgpu: {
supported: void 0,
backend: void 0,
adapter: void 0
},
kernels: [],
Canvas: void 0,
Image: void 0,
ImageData: void 0
};
async function backendInfo() {
var _a;
env2.backends = Object.keys(tf.engine().registryFactory);
env2.wasm.supported = typeof WebAssembly !== "undefined";
env2.wasm.backend = env2.backends.includes("wasm");
if (env2.wasm.supported && env2.wasm.backend && tf.getBackend() === "wasm") {
env2.wasm.simd = await tf.env().getAsync("WASM_HAS_SIMD_SUPPORT");
env2.wasm.multithread = await tf.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
}
const c = canvas(100, 100);
const ctx = c ? c.getContext("webgl2") : void 0;
env2.webgl.supported = typeof ctx !== "undefined";
env2.webgl.backend = env2.backends.includes("webgl");
if (env2.webgl.supported && env2.webgl.backend && (tf.getBackend() === "webgl" || tf.getBackend() === "humangl")) {
const gl = tf.backend().gpgpu !== "undefined" ? await tf.backend().getGPGPUContext().gl : null;
if (gl) {
env2.webgl.version = gl.getParameter(gl.VERSION);
env2.webgl.renderer = gl.getParameter(gl.RENDERER);
}
}
env2.webgpu.supported = env2.browser && typeof navigator["gpu"] !== "undefined";
env2.webgpu.backend = env2.backends.includes("webgpu");
if (env2.webgpu.supported)
env2.webgpu.adapter = (_a = await navigator["gpu"].requestAdapter()) == null ? void 0 : _a.name;
env2.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
}
async function get() {
env2.browser = typeof navigator !== "undefined";
env2.node = typeof process !== "undefined";
env2.tfjs.version = tf.version_core;
env2.offscreen = typeof env2.offscreen === "undefined" ? typeof OffscreenCanvas !== "undefined" : env2.offscreen;
if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) {
const platformMatch = raw[0].match(/\(([^()]+)\)/g);
env2.platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
env2.agent = navigator.userAgent.replace(raw[0], "");
if (env2.platform[1])
env2.agent = env2.agent.replace(raw[1], "");
env2.agent = env2.agent.replace(/ /g, " ");
}
} else if (typeof process !== "undefined") {
env2.platform = `${process.platform} ${process.arch}`;
env2.agent = `NodeJS ${process.version}`;
}
env2.worker = env2.browser && env2.offscreen ? typeof WorkerGlobalScope !== "undefined" : void 0;
await backendInfo();
}
async function set(obj) {
env2 = mergeDeep(env2, obj);
}
// src/image/image.ts // src/image/image.ts
var maxSize = 2048; var maxSize = 2048;
var inCanvas = null; var inCanvas = null;
@ -1039,8 +945,8 @@ var tmpCanvas = null;
var fx; var fx;
function canvas(width, height) { function canvas(width, height) {
let c; let c;
if (env2.browser) { if (env.browser) {
if (env2.offscreen) { if (env.offscreen) {
c = new OffscreenCanvas(width, height); c = new OffscreenCanvas(width, height);
} else { } else {
if (typeof document === "undefined") if (typeof document === "undefined")
@ -1050,8 +956,8 @@ function canvas(width, height) {
c.height = height; c.height = height;
} }
} else { } else {
if (typeof env2.Canvas !== "undefined") if (typeof env.Canvas !== "undefined")
c = new env2.Canvas(width, height); c = new env.Canvas(width, height);
else if (typeof globalThis.Canvas !== "undefined") else if (typeof globalThis.Canvas !== "undefined")
c = new globalThis.Canvas(width, height); c = new globalThis.Canvas(width, height);
} }
@ -1069,16 +975,16 @@ function process2(input, config3, getTensor = true) {
log("input is missing"); log("input is missing");
return { tensor: null, canvas: null }; return { tensor: null, canvas: null };
} }
if (!(input instanceof tf2.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env2.Canvas !== "undefined" && input instanceof env2.Canvas) && !(typeof globalThis.Canvas !== "undefined" && input instanceof globalThis.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) { if (!(input instanceof tf.Tensor) && !(typeof Image !== "undefined" && input instanceof Image) && !(typeof env.Canvas !== "undefined" && input instanceof env.Canvas) && !(typeof globalThis.Canvas !== "undefined" && input instanceof globalThis.Canvas) && !(typeof ImageData !== "undefined" && input instanceof ImageData) && !(typeof ImageBitmap !== "undefined" && input instanceof ImageBitmap) && !(typeof HTMLImageElement !== "undefined" && input instanceof HTMLImageElement) && !(typeof HTMLMediaElement !== "undefined" && input instanceof HTMLMediaElement) && !(typeof HTMLVideoElement !== "undefined" && input instanceof HTMLVideoElement) && !(typeof HTMLCanvasElement !== "undefined" && input instanceof HTMLCanvasElement) && !(typeof OffscreenCanvas !== "undefined" && input instanceof OffscreenCanvas)) {
throw new Error("input type is not recognized"); throw new Error("input type is not recognized");
} }
if (input instanceof tf2.Tensor) { if (input instanceof tf.Tensor) {
if (input["isDisposedInternal"]) { if (input["isDisposedInternal"]) {
throw new Error("input tensor is disposed"); throw new Error("input tensor is disposed");
} else if (!input.shape || input.shape.length !== 4 || input.shape[0] !== 1 || input.shape[3] !== 3) { } else if (!input.shape || input.shape.length !== 4 || input.shape[0] !== 1 || input.shape[3] !== 3) {
throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input["shape"]}`); throw new Error(`input tensor shape must be [1, height, width, 3] and instead was ${input["shape"]}`);
} else { } else {
return { tensor: tf2.clone(input), canvas: config3.filter.return ? outCanvas : null }; return { tensor: tf.clone(input), canvas: config3.filter.return ? outCanvas : null };
} }
} else { } else {
if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) { if (typeof input["readyState"] !== "undefined" && input["readyState"] <= 2) {
@ -1130,10 +1036,10 @@ function process2(input, config3, getTensor = true) {
} }
if (!outCanvas || inCanvas.width !== outCanvas.width || (inCanvas == null ? void 0 : inCanvas.height) !== (outCanvas == null ? void 0 : outCanvas.height)) if (!outCanvas || inCanvas.width !== outCanvas.width || (inCanvas == null ? void 0 : inCanvas.height) !== (outCanvas == null ? void 0 : outCanvas.height))
outCanvas = canvas(inCanvas.width, inCanvas.height); outCanvas = canvas(inCanvas.width, inCanvas.height);
if (config3.filter.enabled && env2.webgl.supported) { if (config3.filter.enabled && env.webgl.supported) {
if (!fx) if (!fx)
fx = env2.browser ? new GLImageFilter({ canvas: outCanvas }) : null; fx = env.browser ? new GLImageFilter() : null;
env2.filter = !!fx; env.filter = !!fx;
if (!fx) if (!fx)
return { tensor: null, canvas: inCanvas }; return { tensor: null, canvas: inCanvas };
fx.reset(); fx.reset();
@ -1173,7 +1079,7 @@ function process2(input, config3, getTensor = true) {
copy(inCanvas, outCanvas); copy(inCanvas, outCanvas);
if (fx) if (fx)
fx = null; fx = null;
env2.filter = !!fx; env.filter = !!fx;
} }
if (!getTensor) if (!getTensor)
return { tensor: null, canvas: outCanvas }; return { tensor: null, canvas: outCanvas };
@ -1182,22 +1088,22 @@ function process2(input, config3, getTensor = true) {
let pixels; let pixels;
let depth = 3; let depth = 3;
if (typeof ImageData !== "undefined" && input instanceof ImageData || input["data"] && input["width"] && input["height"]) { if (typeof ImageData !== "undefined" && input instanceof ImageData || input["data"] && input["width"] && input["height"]) {
if (env2.browser && tf2.browser) { if (env.browser && tf.browser) {
pixels = tf2.browser ? tf2.browser.fromPixels(input) : null; pixels = tf.browser ? tf.browser.fromPixels(input) : null;
} else { } else {
depth = input["data"].length / input["height"] / input["width"]; depth = input["data"].length / input["height"] / input["width"];
const arr = new Uint8Array(input["data"]["buffer"]); const arr = new Uint8Array(input["data"]["buffer"]);
pixels = tf2.tensor(arr, [input["height"], input["width"], depth], "int32"); pixels = tf.tensor(arr, [input["height"], input["width"], depth], "int32");
} }
} else { } else {
if (!tmpCanvas || outCanvas.width !== tmpCanvas.width || (outCanvas == null ? void 0 : outCanvas.height) !== (tmpCanvas == null ? void 0 : tmpCanvas.height)) if (!tmpCanvas || outCanvas.width !== tmpCanvas.width || (outCanvas == null ? void 0 : outCanvas.height) !== (tmpCanvas == null ? void 0 : tmpCanvas.height))
tmpCanvas = canvas(outCanvas.width, outCanvas.height); tmpCanvas = canvas(outCanvas.width, outCanvas.height);
if (tf2.browser && env2.browser) { if (tf.browser && env.browser) {
if (config3.backend === "webgl" || config3.backend === "humangl" || config3.backend === "webgpu") { if (config3.backend === "webgl" || config3.backend === "humangl" || config3.backend === "webgpu") {
pixels = tf2.browser.fromPixels(outCanvas); pixels = tf.browser.fromPixels(outCanvas);
} else { } else {
tmpCanvas = copy(outCanvas); tmpCanvas = copy(outCanvas);
pixels = tf2.browser.fromPixels(tmpCanvas); pixels = tf.browser.fromPixels(tmpCanvas);
} }
} else { } else {
const tempCanvas = copy(outCanvas); const tempCanvas = copy(outCanvas);
@ -1205,19 +1111,19 @@ function process2(input, config3, getTensor = true) {
const tempData = tempCtx.getImageData(0, 0, targetWidth, targetHeight); const tempData = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
depth = tempData.data.length / targetWidth / targetHeight; depth = tempData.data.length / targetWidth / targetHeight;
const arr = new Uint8Array(tempData.data.buffer); const arr = new Uint8Array(tempData.data.buffer);
pixels = tf2.tensor(arr, [targetWidth, targetHeight, depth]); pixels = tf.tensor(arr, [targetWidth, targetHeight, depth]);
} }
} }
if (depth === 4) { if (depth === 4) {
const rgb2 = tf2.slice3d(pixels, [0, 0, 0], [-1, -1, 3]); const rgb2 = tf.slice3d(pixels, [0, 0, 0], [-1, -1, 3]);
tf2.dispose(pixels); tf.dispose(pixels);
pixels = rgb2; pixels = rgb2;
} }
if (!pixels) if (!pixels)
throw new Error("cannot create tensor from input"); throw new Error("cannot create tensor from input");
const casted = tf2.cast(pixels, "float32"); const casted = tf.cast(pixels, "float32");
const tensor3 = tf2.expandDims(casted, 0); const tensor3 = tf.expandDims(casted, 0);
tf2.dispose([pixels, casted]); tf.dispose([pixels, casted]);
return { tensor: tensor3, canvas: config3.filter.return ? outCanvas : null }; return { tensor: tensor3, canvas: config3.filter.return ? outCanvas : null };
} }
} }
@ -1226,11 +1132,11 @@ var lastCacheDiff = 1;
var benchmarked = 0; var benchmarked = 0;
var checksum = async (input) => { var checksum = async (input) => {
const resizeFact = 48; const resizeFact = 48;
const reduced = tf2.image.resizeBilinear(input, [Math.trunc((input.shape[1] || 1) / resizeFact), Math.trunc((input.shape[2] || 1) / resizeFact)]); const reduced = tf.image.resizeBilinear(input, [Math.trunc((input.shape[1] || 1) / resizeFact), Math.trunc((input.shape[2] || 1) / resizeFact)]);
const tfSum = async () => { const tfSum = async () => {
const sumT = tf2.sum(reduced); const sumT = tf.sum(reduced);
const sum0 = await sumT.data(); const sum0 = await sumT.data();
tf2.dispose(sumT); tf.dispose(sumT);
return sum0[0]; return sum0[0];
}; };
const jsSum = async () => { const jsSum = async () => {
@ -1249,7 +1155,7 @@ var checksum = async (input) => {
benchmarked = t1 - t0 < t2 - t1 ? 1 : 2; benchmarked = t1 - t0 < t2 - t1 ? 1 : 2;
} }
const res = benchmarked === 1 ? await jsSum() : await tfSum(); const res = benchmarked === 1 ? await jsSum() : await tfSum();
tf2.dispose(reduced); tf.dispose(reduced);
return res; return res;
}; };
async function skip(config3, input) { async function skip(config3, input) {
@ -1264,12 +1170,132 @@ async function skip(config3, input) {
return skipFrame; return skipFrame;
} }
// src/util/env.ts
var Env = class {
constructor() {
__publicField(this, "browser");
__publicField(this, "node");
__publicField(this, "worker");
__publicField(this, "platform", "");
__publicField(this, "agent", "");
__publicField(this, "backends", []);
__publicField(this, "initial");
__publicField(this, "filter");
__publicField(this, "tfjs");
__publicField(this, "offscreen");
__publicField(this, "wasm", {
supported: void 0,
backend: void 0,
simd: void 0,
multithread: void 0
});
__publicField(this, "webgl", {
supported: void 0,
backend: void 0,
version: void 0,
renderer: void 0
});
__publicField(this, "webgpu", {
supported: void 0,
backend: void 0,
adapter: void 0
});
__publicField(this, "cpu", {
model: void 0,
flags: []
});
__publicField(this, "kernels", []);
__publicField(this, "Canvas");
__publicField(this, "Image");
__publicField(this, "ImageData");
this.browser = typeof navigator !== "undefined";
this.node = typeof process !== "undefined";
this.tfjs = { version: tf2.version_core };
this.offscreen = typeof OffscreenCanvas !== "undefined";
this.initial = true;
this.worker = this.browser && this.offscreen ? typeof WorkerGlobalScope !== "undefined" : void 0;
if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) {
const platformMatch = raw[0].match(/\(([^()]+)\)/g);
this.platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
this.agent = navigator.userAgent.replace(raw[0], "");
if (this.platform[1])
this.agent = this.agent.replace(raw[1], "");
this.agent = this.agent.replace(/ /g, " ");
}
} else if (typeof process !== "undefined") {
this.platform = `${process.platform} ${process.arch}`;
this.agent = `NodeJS ${process.version}`;
}
}
async updateBackend() {
var _a;
this.backends = Object.keys(tf2.engine().registryFactory);
this.wasm.supported = typeof WebAssembly !== "undefined";
this.wasm.backend = this.backends.includes("wasm");
if (this.wasm.supported && this.wasm.backend && tf2.getBackend() === "wasm") {
this.wasm.simd = await tf2.env().getAsync("WASM_HAS_SIMD_SUPPORT");
this.wasm.multithread = await tf2.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");
}
const c = canvas(100, 100);
const ctx = c ? c.getContext("webgl2") : void 0;
this.webgl.supported = typeof ctx !== "undefined";
this.webgl.backend = this.backends.includes("webgl");
if (this.webgl.supported && this.webgl.backend && (tf2.getBackend() === "webgl" || tf2.getBackend() === "humangl")) {
const gl = tf2.backend().gpgpu !== "undefined" ? await tf2.backend().getGPGPUContext().gl : null;
if (gl) {
this.webgl.version = gl.getParameter(gl.VERSION);
this.webgl.renderer = gl.getParameter(gl.RENDERER);
}
}
this.webgpu.supported = this.browser && typeof navigator["gpu"] !== "undefined";
this.webgpu.backend = this.backends.includes("webgpu");
if (this.webgpu.supported)
this.webgpu.adapter = (_a = await navigator["gpu"].requestAdapter()) == null ? void 0 : _a.name;
this.kernels = tf2.getKernelsForBackend(tf2.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
}
async updateCPU() {
var _a;
const cpu = { model: "", flags: [] };
if (this.node && ((_a = this.platform) == null ? void 0 : _a.startsWith("linux"))) {
const fs = require("fs");
try {
const data = fs.readFileSync("/proc/cpuinfo").toString();
for (const line of data.split("\n")) {
if (line.startsWith("model name")) {
cpu.model = line.match(/:(.*)/g)[0].replace(":", "").trim();
}
if (line.startsWith("flags")) {
cpu.flags = line.match(/:(.*)/g)[0].replace(":", "").trim().split(" ").sort();
}
}
} catch (e) {
}
}
if (!this["cpu"])
Object.defineProperty(this, "cpu", { value: cpu });
else
this["cpu"] = cpu;
}
};
var env = new Env();
// src/human.ts
var tf28 = __toModule(require_tfjs_esm());
// package.json
var version = "2.3.5";
// src/tfjs/humangl.ts
var tf24 = __toModule(require_tfjs_esm());
// src/gear/gear-agegenderrace.ts // src/gear/gear-agegenderrace.ts
var tf3 = __toModule(require_tfjs_esm()); var tf3 = __toModule(require_tfjs_esm());
var model; var model;
var skipped = Number.MAX_SAFE_INTEGER; var skipped = Number.MAX_SAFE_INTEGER;
async function load(config3) { async function load(config3) {
if (env2.initial) if (env.initial)
model = null; model = null;
if (!model) { if (!model) {
model = await tf3.loadGraphModel(join(config3.modelBasePath, config3.face.agegenderrace.modelPath)); model = await tf3.loadGraphModel(join(config3.modelBasePath, config3.face.agegenderrace.modelPath));
@ -1290,7 +1316,7 @@ var skipped2 = Number.MAX_SAFE_INTEGER;
var lastCount = 0; var lastCount = 0;
async function load2(config3) { async function load2(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model2 = null; model2 = null;
if (!model2) { if (!model2) {
model2 = await tf4.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.antispoof) == null ? void 0 : _a.modelPath) || "")); model2 = await tf4.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.antispoof) == null ? void 0 : _a.modelPath) || ""));
@ -4755,7 +4781,7 @@ var inputSize = 0;
var size = () => inputSize; var size = () => inputSize;
async function load3(config3) { async function load3(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model3 = null; model3 = null;
if (!model3) { if (!model3) {
model3 = await tf6.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.detector) == null ? void 0 : _a.modelPath) || "")); model3 = await tf6.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.detector) == null ? void 0 : _a.modelPath) || ""));
@ -5121,7 +5147,7 @@ var inputSize3 = 0;
var last = []; var last = [];
var skipped4 = Number.MAX_SAFE_INTEGER; var skipped4 = Number.MAX_SAFE_INTEGER;
async function load4(config3) { async function load4(config3) {
if (env2.initial) if (env.initial)
model4 = null; model4 = null;
if (!model4) { if (!model4) {
fakeOps(["floormod"], config3); fakeOps(["floormod"], config3);
@ -5188,7 +5214,7 @@ async function predict3(input, config3) {
return last; return last;
} }
skipped4 = 0; skipped4 = 0;
if (!env2.kernels.includes("mod") || !env2.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last; return last;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [input.shape[2], input.shape[1]]; const outputSize2 = [input.shape[2], input.shape[1]];
@ -5242,7 +5268,7 @@ var model5;
var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} }; var cache2 = { id: 0, keypoints: [], box: [0, 0, 0, 0], boxRaw: [0, 0, 0, 0], score: 0, annotations: {} };
var skipped5 = Number.MAX_SAFE_INTEGER; var skipped5 = Number.MAX_SAFE_INTEGER;
async function load5(config3) { async function load5(config3) {
if (env2.initial) if (env.initial)
model5 = null; model5 = null;
if (!model5) { if (!model5) {
model5 = await tf9.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || "")); model5 = await tf9.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || ""));
@ -5356,7 +5382,7 @@ var skipped6 = Number.MAX_SAFE_INTEGER;
var rgb = [0.2989, 0.587, 0.114]; var rgb = [0.2989, 0.587, 0.114];
async function load6(config3) { async function load6(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model6 = null; model6 = null;
if (!model6) { if (!model6) {
model6 = await tf10.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.emotion) == null ? void 0 : _a.modelPath) || "")); model6 = await tf10.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.emotion) == null ? void 0 : _a.modelPath) || ""));
@ -5434,7 +5460,7 @@ var irisLandmarks = {
}; };
async function load7(config3) { async function load7(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model7 = null; model7 = null;
if (!model7) { if (!model7) {
model7 = await tf11.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.iris) == null ? void 0 : _a.modelPath) || "")); model7 = await tf11.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.iris) == null ? void 0 : _a.modelPath) || ""));
@ -5479,7 +5505,7 @@ var getEyeBox = (rawCoords, face5, eyeInnerCornerIndex, eyeOuterCornerIndex, fli
box4.endPoint[1] / meshSize, box4.endPoint[1] / meshSize,
box4.endPoint[0] / meshSize box4.endPoint[0] / meshSize
]], [0], [inputSize4, inputSize4]); ]], [0], [inputSize4, inputSize4]);
if (flip && env2.kernels.includes("flipleftright")) { if (flip && env.kernels.includes("flipleftright")) {
const flipped = tf11.image.flipLeftRight(crop2); const flipped = tf11.image.flipLeftRight(crop2);
tf11.dispose(crop2); tf11.dispose(crop2);
crop2 = flipped; crop2 = flipped;
@ -5593,7 +5619,7 @@ async function predict6(input, config3) {
faceScore: 0, faceScore: 0,
annotations: {} annotations: {}
}; };
if (((_d = config3.face.detector) == null ? void 0 : _d.rotation) && ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) && env2.kernels.includes("rotatewithoffset")) { if (((_d = config3.face.detector) == null ? void 0 : _d.rotation) && ((_e = config3.face.mesh) == null ? void 0 : _e.enabled) && env.kernels.includes("rotatewithoffset")) {
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} else { } else {
rotationMatrix = IDENTITY_MATRIX; rotationMatrix = IDENTITY_MATRIX;
@ -5635,7 +5661,7 @@ async function predict6(input, config3) {
box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence }; box4 = { ...enlargeBox(calculateLandmarksBoundingBox(face5.mesh), 1.5), confidence: box4.confidence };
for (const key of Object.keys(meshAnnotations)) for (const key of Object.keys(meshAnnotations))
face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]); face5.annotations[key] = meshAnnotations[key].map((index) => face5.mesh[index]);
if (((_j = config3.face.detector) == null ? void 0 : _j.rotation) && config3.face.mesh.enabled && ((_k = config3.face.description) == null ? void 0 : _k.enabled) && env2.kernels.includes("rotatewithoffset")) { if (((_j = config3.face.detector) == null ? void 0 : _j.rotation) && config3.face.mesh.enabled && ((_k = config3.face.description) == null ? void 0 : _k.enabled) && env.kernels.includes("rotatewithoffset")) {
tf12.dispose(face5.tensor); tf12.dispose(face5.tensor);
[angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5); [angle, rotationMatrix, face5.tensor] = correctFaceRotation(box4, input, inputSize5);
} }
@ -5659,7 +5685,7 @@ async function predict6(input, config3) {
} }
async function load8(config3) { async function load8(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
model8 = null; model8 = null;
if (!model8) { if (!model8) {
model8 = await tf12.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.mesh) == null ? void 0 : _a.modelPath) || "")); model8 = await tf12.loadGraphModel(join(config3.modelBasePath, ((_a = config3.face.mesh) == null ? void 0 : _a.modelPath) || ""));
@ -5686,7 +5712,7 @@ var skipped8 = Number.MAX_SAFE_INTEGER;
async function load9(config3) { async function load9(config3) {
var _a, _b; var _a, _b;
const modelUrl = join(config3.modelBasePath, ((_a = config3.face.description) == null ? void 0 : _a.modelPath) || ""); const modelUrl = join(config3.modelBasePath, ((_a = config3.face.description) == null ? void 0 : _a.modelPath) || "");
if (env2.initial) if (env.initial)
model9 = null; model9 = null;
if (!model9) { if (!model9) {
model9 = await tf13.loadGraphModel(modelUrl); model9 = await tf13.loadGraphModel(modelUrl);
@ -8991,7 +9017,7 @@ var HandPipeline = class {
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0; const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
const palmCenter = getBoxCenter2(currentBox); const palmCenter = getBoxCenter2(currentBox);
const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]]; const palmCenterNormalized = [palmCenter[0] / image25.shape[2], palmCenter[1] / image25.shape[1]];
const rotatedImage = config3.hand.rotation && env2.kernels.includes("rotatewithoffset") ? tf16.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone(); const rotatedImage = config3.hand.rotation && env.kernels.includes("rotatewithoffset") ? tf16.image.rotateWithOffset(image25, angle, 0, palmCenterNormalized) : image25.clone();
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter); const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox; const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]); const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@ -9493,7 +9519,7 @@ async function predict8(input, config3) {
} }
async function load10(config3) { async function load10(config3) {
var _a, _b, _c, _d, _e, _f; var _a, _b, _c, _d, _e, _f;
if (env2.initial) { if (env.initial) {
handDetectorModel = null; handDetectorModel = null;
handPoseModel = null; handPoseModel = null;
} }
@ -9583,7 +9609,7 @@ var fingerMap = {
}; };
async function loadDetect2(config3) { async function loadDetect2(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
models2[0] = null; models2[0] = null;
if (!models2[0]) { if (!models2[0]) {
fakeOps(["tensorlistreserve", "enter", "tensorlistfromtensor", "merge", "loopcond", "switch", "exit", "tensorliststack", "nextiteration", "tensorlistsetitem", "tensorlistgetitem", "reciprocal", "shape", "split", "where"], config3); fakeOps(["tensorlistreserve", "enter", "tensorlistfromtensor", "merge", "loopcond", "switch", "exit", "tensorliststack", "nextiteration", "tensorlistsetitem", "tensorlistgetitem", "reciprocal", "shape", "split", "where"], config3);
@ -9601,7 +9627,7 @@ async function loadDetect2(config3) {
} }
async function loadSkeleton(config3) { async function loadSkeleton(config3) {
var _a, _b; var _a, _b;
if (env2.initial) if (env.initial)
models2[1] = null; models2[1] = null;
if (!models2[1]) { if (!models2[1]) {
models2[1] = await tf18.loadGraphModel(join(config3.modelBasePath, ((_a = config3.hand.skeleton) == null ? void 0 : _a.modelPath) || "")); models2[1] = await tf18.loadGraphModel(join(config3.modelBasePath, ((_a = config3.hand.skeleton) == null ? void 0 : _a.modelPath) || ""));
@ -9910,7 +9936,7 @@ var cache5 = {
bodies: [] bodies: []
}; };
async function load11(config3) { async function load11(config3) {
if (env2.initial) if (env.initial)
model10 = null; model10 = null;
if (!model10) { if (!model10) {
fakeOps(["size"], config3); fakeOps(["size"], config3);
@ -10043,7 +10069,7 @@ var last4 = [];
var skipped11 = Number.MAX_SAFE_INTEGER; var skipped11 = Number.MAX_SAFE_INTEGER;
var scaleBox = 2.5; var scaleBox = 2.5;
async function load12(config3) { async function load12(config3) {
if (!model11 || env2.initial) { if (!model11 || env.initial) {
model11 = await tf21.loadGraphModel(join(config3.modelBasePath, config3.object.modelPath || "")); model11 = await tf21.loadGraphModel(join(config3.modelBasePath, config3.object.modelPath || ""));
const inputs = Object.values(model11.modelSignature["inputs"]); const inputs = Object.values(model11.modelSignature["inputs"]);
model11.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null; model11.inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : null;
@ -10124,7 +10150,7 @@ async function predict11(image25, config3) {
return last4; return last4;
} }
skipped11 = 0; skipped11 = 0;
if (!env2.kernels.includes("mod") || !env2.kernels.includes("sparsetodense")) if (!env.kernels.includes("mod") || !env.kernels.includes("sparsetodense"))
return last4; return last4;
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const outputSize2 = [image25.shape[2], image25.shape[1]]; const outputSize2 = [image25.shape[2], image25.shape[1]];
@ -10475,7 +10501,7 @@ async function predict12(input, config3) {
return scaled; return scaled;
} }
async function load13(config3) { async function load13(config3) {
if (!model12 || env2.initial) { if (!model12 || env.initial) {
model12 = await tf22.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || "")); model12 = await tf22.loadGraphModel(join(config3.modelBasePath, config3.body.modelPath || ""));
if (!model12 || !model12["modelUrl"]) if (!model12 || !model12["modelUrl"])
log("load model failed:", config3.body.modelPath); log("load model failed:", config3.body.modelPath);
@ -10491,7 +10517,7 @@ var tf23 = __toModule(require_tfjs_esm());
var model13; var model13;
var busy = false; var busy = false;
async function load14(config3) { async function load14(config3) {
if (!model13 || env2.initial) { if (!model13 || env.initial) {
model13 = await tf23.loadGraphModel(join(config3.modelBasePath, config3.segmentation.modelPath || "")); model13 = await tf23.loadGraphModel(join(config3.modelBasePath, config3.segmentation.modelPath || ""));
if (!model13 || !model13["modelUrl"]) if (!model13 || !model13["modelUrl"])
log("load model failed:", config3.segmentation.modelPath); log("load model failed:", config3.segmentation.modelPath);
@ -10530,7 +10556,7 @@ async function process5(input, background, config3) {
t.data = tf23.image.resizeBilinear(t.squeeze, [height, width]); t.data = tf23.image.resizeBilinear(t.squeeze, [height, width]);
} }
const data = Array.from(await t.data.data()); const data = Array.from(await t.data.data());
if (env2.node && !env2.Canvas && typeof ImageData === "undefined") { if (env.node && !env.Canvas && typeof ImageData === "undefined") {
if (config3.debug) if (config3.debug)
log("canvas support missing"); log("canvas support missing");
Object.keys(t).forEach((tensor3) => tf23.dispose(t[tensor3])); Object.keys(t).forEach((tensor3) => tf23.dispose(t[tensor3]));
@ -10602,7 +10628,7 @@ function reset(instance) {
} }
async function load15(instance) { async function load15(instance) {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E; var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E;
if (env2.initial) if (env.initial)
reset(instance); reset(instance);
if (instance.config.hand.enabled) { if (instance.config.hand.enabled) {
if (!instance.models.handpose && ((_b = (_a = instance.config.hand.detector) == null ? void 0 : _a.modelPath) == null ? void 0 : _b.includes("handdetect"))) if (!instance.models.handpose && ((_b = (_a = instance.config.hand.detector) == null ? void 0 : _a.modelPath) == null ? void 0 : _b.includes("handdetect")))
@ -10796,24 +10822,24 @@ async function register(instance) {
var tf25 = __toModule(require_tfjs_esm()); var tf25 = __toModule(require_tfjs_esm());
async function check(instance, force = false) { async function check(instance, force = false) {
instance.state = "backend"; instance.state = "backend";
if (force || env2.initial || instance.config.backend && instance.config.backend.length > 0 && tf25.getBackend() !== instance.config.backend) { if (force || env.initial || instance.config.backend && instance.config.backend.length > 0 && tf25.getBackend() !== instance.config.backend) {
const timeStamp = now(); const timeStamp = now();
if (instance.config.backend && instance.config.backend.length > 0) { if (instance.config.backend && instance.config.backend.length > 0) {
if (typeof window === "undefined" && typeof WorkerGlobalScope !== "undefined" && instance.config.debug) { if (typeof window === "undefined" && typeof WorkerGlobalScope !== "undefined" && instance.config.debug) {
if (instance.config.debug) if (instance.config.debug)
log("running inside web worker"); log("running inside web worker");
} }
if (env2.browser && instance.config.backend === "tensorflow") { if (env.browser && instance.config.backend === "tensorflow") {
if (instance.config.debug) if (instance.config.debug)
log("override: backend set to tensorflow while running in browser"); log("override: backend set to tensorflow while running in browser");
instance.config.backend = "humangl"; instance.config.backend = "humangl";
} }
if (env2.node && (instance.config.backend === "webgl" || instance.config.backend === "humangl")) { if (env.node && (instance.config.backend === "webgl" || instance.config.backend === "humangl")) {
if (instance.config.debug) if (instance.config.debug)
log(`override: backend set to ${instance.config.backend} while running in nodejs`); log(`override: backend set to ${instance.config.backend} while running in nodejs`);
instance.config.backend = "tensorflow"; instance.config.backend = "tensorflow";
} }
if (env2.browser && instance.config.backend === "webgpu") { if (env.browser && instance.config.backend === "webgpu") {
if (typeof navigator === "undefined" || typeof navigator["gpu"] === "undefined") { if (typeof navigator === "undefined" || typeof navigator["gpu"] === "undefined") {
log("override: backend set to webgpu but browser does not support webgpu"); log("override: backend set to webgpu but browser does not support webgpu");
instance.config.backend = "humangl"; instance.config.backend = "humangl";
@ -10830,7 +10856,7 @@ async function check(instance, force = false) {
log("available backends:", available); log("available backends:", available);
if (!available.includes(instance.config.backend)) { if (!available.includes(instance.config.backend)) {
log(`error: backend ${instance.config.backend} not found in registry`); log(`error: backend ${instance.config.backend} not found in registry`);
instance.config.backend = env2.node ? "tensorflow" : "webgl"; instance.config.backend = env.node ? "tensorflow" : "webgl";
if (instance.config.debug) if (instance.config.debug)
log(`override: setting backend ${instance.config.backend}`); log(`override: setting backend ${instance.config.backend}`);
} }
@ -10861,7 +10887,6 @@ async function check(instance, force = false) {
if (tf25.getBackend() === "humangl") { if (tf25.getBackend() === "humangl") {
tf25.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false); tf25.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
tf25.ENV.set("WEBGL_CPU_FORWARD", true); tf25.ENV.set("WEBGL_CPU_FORWARD", true);
tf25.ENV.set("WEBGL_PACK_DEPTHWISECONV", false);
tf25.ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true); tf25.ENV.set("WEBGL_USE_SHAPES_UNIFORMS", true);
tf25.ENV.set("CPU_HANDOFF_SIZE_THRESHOLD", 256); tf25.ENV.set("CPU_HANDOFF_SIZE_THRESHOLD", 256);
if (typeof instance.config["deallocate"] !== "undefined" && instance.config["deallocate"]) { if (typeof instance.config["deallocate"] !== "undefined" && instance.config["deallocate"]) {
@ -10875,16 +10900,12 @@ async function check(instance, force = false) {
} }
} }
if (tf25.getBackend() === "webgpu") { if (tf25.getBackend() === "webgpu") {
tf25.ENV.set("WEBGPU_CPU_HANDOFF_SIZE_THRESHOLD", 512);
tf25.ENV.set("WEBGPU_DEFERRED_SUBMIT_BATCH_SIZE", 0);
tf25.ENV.set("WEBGPU_CPU_FORWARD", true);
} }
tf25.enableProdMode(); tf25.enableProdMode();
await tf25.ready(); await tf25.ready();
instance.performance.backend = Math.trunc(now() - timeStamp); instance.performance.backend = Math.trunc(now() - timeStamp);
instance.config.backend = tf25.getBackend(); instance.config.backend = tf25.getBackend();
get(); env.updateBackend();
instance.env = env2;
} }
return true; return true;
} }
@ -10900,7 +10921,7 @@ function fakeOps(kernelNames, config3) {
}; };
tf25.registerKernel(kernelConfig); tf25.registerKernel(kernelConfig);
} }
env2.kernels = tf25.getKernelsForBackend(tf25.getBackend()).map((kernel) => kernel.kernelName.toLowerCase()); env.kernels = tf25.getKernelsForBackend(tf25.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
} }
// src/util/draw.ts // src/util/draw.ts
@ -12647,8 +12668,8 @@ async function warmupCanvas(instance) {
let img; let img;
if (typeof Image !== "undefined") if (typeof Image !== "undefined")
img = new Image(); img = new Image();
else if (env2.Image) else if (env.Image)
img = new env2.Image(); img = new env.Image();
img.onload = async () => { img.onload = async () => {
const canvas3 = canvas(img.naturalWidth, img.naturalHeight); const canvas3 = canvas(img.naturalWidth, img.naturalHeight);
if (!canvas3) { if (!canvas3) {
@ -12702,7 +12723,7 @@ async function warmup(instance, userConfig) {
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
if (typeof createImageBitmap === "function") if (typeof createImageBitmap === "function")
res = await warmupBitmap(instance); res = await warmupBitmap(instance);
else if (typeof Image !== "undefined" || env2.Canvas !== void 0) else if (typeof Image !== "undefined" || env.Canvas !== void 0)
res = await warmupCanvas(instance); res = await warmupCanvas(instance);
else else
res = await warmupNode(instance); res = await warmupNode(instance);
@ -12767,11 +12788,10 @@ var Human = class {
if (this.events && this.events.dispatchEvent) if (this.events && this.events.dispatchEvent)
(_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event)); (_a = this.events) == null ? void 0 : _a.dispatchEvent(new Event(event));
}); });
get(); this.env = env;
this.env = env2;
config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf28.version_core}/dist/`; config.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf28.version_core}/dist/`;
config.modelBasePath = this.env.browser ? "../models/" : "file://models/"; config.modelBasePath = env.browser ? "../models/" : "file://models/";
config.backend = this.env.browser ? "humangl" : "tensorflow"; config.backend = env.browser ? "humangl" : "tensorflow";
this.version = version; this.version = version;
Object.defineProperty(this, "version", { value: version }); Object.defineProperty(this, "version", { value: version });
this.config = JSON.parse(JSON.stringify(config)); this.config = JSON.parse(JSON.stringify(config));
@ -12827,7 +12847,6 @@ var Human = class {
async init() { async init() {
await check(this, true); await check(this, true);
await this.tf.ready(); await this.tf.ready();
set(this.env);
} }
async load(userConfig) { async load(userConfig) {
this.state = "load"; this.state = "load";
@ -12835,7 +12854,7 @@ var Human = class {
const count2 = Object.values(this.models).filter((model14) => model14).length; const count2 = Object.values(this.models).filter((model14) => model14).length;
if (userConfig) if (userConfig)
this.config = mergeDeep(this.config, userConfig); this.config = mergeDeep(this.config, userConfig);
if (env2.initial) { if (env.initial) {
if (this.config.debug) if (this.config.debug)
log(`version: ${this.version}`); log(`version: ${this.version}`);
if (this.config.debug) if (this.config.debug)
@ -12851,9 +12870,9 @@ var Human = class {
} }
} }
await load15(this); await load15(this);
if (env2.initial && this.config.debug) if (env.initial && this.config.debug)
log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors"); log("tf engine state:", this.tf.engine().state.numBytes, "bytes", this.tf.engine().state.numTensors, "tensors");
env2.initial = false; env.initial = false;
const loaded = Object.values(this.models).filter((model14) => model14).length; const loaded = Object.values(this.models).filter((model14) => model14).length;
if (loaded !== count2) { if (loaded !== count2) {
await validate2(this); await validate2(this);
@ -13039,6 +13058,7 @@ _checkSanity = new WeakMap();
_sanity = new WeakMap(); _sanity = new WeakMap();
// Annotate the CommonJS export names for ESM import in node: // Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = { 0 && (module.exports = {
Env,
Human, Human,
Models, Models,
defaults, defaults,

View File

@ -5,6 +5,7 @@
// module imports // module imports
import { log, now, mergeDeep, validate } from './util/util'; import { log, now, mergeDeep, validate } from './util/util';
import { defaults } from './config'; import { defaults } from './config';
import { env, Env } from './util/env';
import * as tf from '../dist/tfjs.esm.js'; import * as tf from '../dist/tfjs.esm.js';
import * as app from '../package.json'; import * as app from '../package.json';
import * as backend from './tfjs/backend'; import * as backend from './tfjs/backend';
@ -12,7 +13,6 @@ import * as blazepose from './body/blazepose';
import * as centernet from './object/centernet'; import * as centernet from './object/centernet';
import * as draw from './util/draw'; import * as draw from './util/draw';
import * as efficientpose from './body/efficientpose'; import * as efficientpose from './body/efficientpose';
import * as env from './util/env';
import * as face from './face/face'; import * as face from './face/face';
import * as facemesh from './face/facemesh'; import * as facemesh from './face/facemesh';
import * as faceres from './face/faceres'; import * as faceres from './face/faceres';
@ -125,7 +125,7 @@ export class Human {
tf: TensorFlow; tf: TensorFlow;
/** Object containing environment information used for diagnostics */ /** Object containing environment information used for diagnostics */
env: env.Env; env: Env;
/** Draw helper classes that can draw detected objects on canvas using specified draw /** Draw helper classes that can draw detected objects on canvas using specified draw
* - options: {@link DrawOptions} global settings for all draw operations, can be overriden for each draw method * - options: {@link DrawOptions} global settings for all draw operations, can be overriden for each draw method
@ -174,11 +174,10 @@ export class Human {
* @return instance: {@link Human} * @return instance: {@link Human}
*/ */
constructor(userConfig?: Partial<Config>) { constructor(userConfig?: Partial<Config>) {
env.get(); this.env = env;
this.env = env.env;
defaults.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf.version_core}/dist/`; defaults.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf.version_core}/dist/`;
defaults.modelBasePath = this.env.browser ? '../models/' : 'file://models/'; defaults.modelBasePath = env.browser ? '../models/' : 'file://models/';
defaults.backend = this.env.browser ? 'humangl' : 'tensorflow'; defaults.backend = env.browser ? 'humangl' : 'tensorflow';
this.version = app.version; // expose version property on instance of class this.version = app.version; // expose version property on instance of class
Object.defineProperty(this, 'version', { value: app.version }); // expose version property directly on class itself Object.defineProperty(this, 'version', { value: app.version }); // expose version property directly on class itself
this.config = JSON.parse(JSON.stringify(defaults)); this.config = JSON.parse(JSON.stringify(defaults));
@ -311,7 +310,6 @@ export class Human {
async init(): Promise<void> { async init(): Promise<void> {
await backend.check(this, true); await backend.check(this, true);
await this.tf.ready(); await this.tf.ready();
env.set(this.env);
} }
/** Load method preloads all configured models on-demand /** Load method preloads all configured models on-demand
@ -326,7 +324,7 @@ export class Human {
const count = Object.values(this.models).filter((model) => model).length; const count = Object.values(this.models).filter((model) => model).length;
if (userConfig) this.config = mergeDeep(this.config, userConfig) as Config; if (userConfig) this.config = mergeDeep(this.config, userConfig) as Config;
if (env.env.initial) { // print version info on first run and check for correct backend setup if (env.initial) { // print version info on first run and check for correct backend setup
if (this.config.debug) log(`version: ${this.version}`); if (this.config.debug) log(`version: ${this.version}`);
if (this.config.debug) log(`tfjs version: ${this.tf.version_core}`); if (this.config.debug) log(`tfjs version: ${this.tf.version_core}`);
if (!await backend.check(this)) log('error: backend check failed'); if (!await backend.check(this)) log('error: backend check failed');
@ -338,8 +336,8 @@ export class Human {
} }
await models.load(this); // actually loads models await models.load(this); // actually loads models
if (env.env.initial && this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors'); // print memory stats on first run if (env.initial && this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors'); // print memory stats on first run
env.env.initial = false; env.initial = false;
const loaded = Object.values(this.models).filter((model) => model).length; const loaded = Object.values(this.models).filter((model) => model).length;
if (loaded !== count) { // number of loaded models changed if (loaded !== count) { // number of loaded models changed

View File

@ -10,16 +10,17 @@ import { env } from '../util/env';
import { log, now } from '../util/util'; import { log, now } from '../util/util';
export type Input = Tensor | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas | typeof Image | typeof env.Canvas; export type Input = Tensor | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas | typeof Image | typeof env.Canvas;
export type AnyCanvas = HTMLCanvasElement | OffscreenCanvas;
const maxSize = 2048; const maxSize = 2048;
// internal temp canvases // internal temp canvases
let inCanvas: HTMLCanvasElement | OffscreenCanvas | null = null; // use global variable to avoid recreating canvas on each frame let inCanvas: AnyCanvas | null = null; // use global variable to avoid recreating canvas on each frame
let outCanvas: HTMLCanvasElement | OffscreenCanvas | null = null; // use global variable to avoid recreating canvas on each frame let outCanvas: AnyCanvas | null = null; // use global variable to avoid recreating canvas on each frame
let tmpCanvas: HTMLCanvasElement | OffscreenCanvas | null = null; // use global variable to avoid recreating canvas on each frame let tmpCanvas: AnyCanvas | null = null; // use global variable to avoid recreating canvas on each frame
// @ts-ignore // imagefx is js module that should be converted to a class // @ts-ignore // imagefx is js module that should be converted to a class
let fx: fxImage.GLImageFilter | null; // instance of imagefx let fx: fxImage.GLImageFilter | null; // instance of imagefx
export function canvas(width, height): HTMLCanvasElement | OffscreenCanvas { export function canvas(width, height): AnyCanvas {
let c; let c;
if (env.browser) { if (env.browser) {
if (env.offscreen) { if (env.offscreen) {
@ -39,7 +40,7 @@ export function canvas(width, height): HTMLCanvasElement | OffscreenCanvas {
return c; return c;
} }
export function copy(input: HTMLCanvasElement | OffscreenCanvas, output?: HTMLCanvasElement | OffscreenCanvas) { export function copy(input: AnyCanvas, output?: AnyCanvas) {
const outputCanvas = output || canvas(input.width, input.height); const outputCanvas = output || canvas(input.width, input.height);
const ctx = outputCanvas.getContext('2d') as CanvasRenderingContext2D; const ctx = outputCanvas.getContext('2d') as CanvasRenderingContext2D;
ctx.drawImage(input, 0, 0); ctx.drawImage(input, 0, 0);
@ -49,7 +50,7 @@ export function copy(input: HTMLCanvasElement | OffscreenCanvas, output?: HTMLCa
// process input image and return tensor // process input image and return tensor
// input can be tensor, imagedata, htmlimageelement, htmlvideoelement // input can be tensor, imagedata, htmlimageelement, htmlvideoelement
// input is resized and run through imagefx filter // input is resized and run through imagefx filter
export function process(input: Input, config: Config, getTensor: boolean = true): { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement | null } { export function process(input: Input, config: Config, getTensor: boolean = true): { tensor: Tensor | null, canvas: AnyCanvas | null } {
if (!input) { if (!input) {
// throw new Error('input is missing'); // throw new Error('input is missing');
if (config.debug) log('input is missing'); if (config.debug) log('input is missing');
@ -119,10 +120,10 @@ export function process(input: Input, config: Config, getTensor: boolean = true)
if (config.filter.flip && typeof inCtx.translate !== 'undefined') { if (config.filter.flip && typeof inCtx.translate !== 'undefined') {
inCtx.translate(originalWidth, 0); inCtx.translate(originalWidth, 0);
inCtx.scale(-1, 1); inCtx.scale(-1, 1);
inCtx.drawImage(input as OffscreenCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height); inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
inCtx.setTransform(1, 0, 0, 1, 0, 0); // resets transforms to defaults inCtx.setTransform(1, 0, 0, 1, 0, 0); // resets transforms to defaults
} else { } else {
inCtx.drawImage(input as OffscreenCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height); inCtx.drawImage(input as AnyCanvas, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas?.width, inCanvas?.height);
} }
} }
@ -130,7 +131,7 @@ export function process(input: Input, config: Config, getTensor: boolean = true)
// imagefx transforms using gl from input canvas to output canvas // imagefx transforms using gl from input canvas to output canvas
if (config.filter.enabled && env.webgl.supported) { if (config.filter.enabled && env.webgl.supported) {
if (!fx) fx = env.browser ? new fxImage.GLImageFilter({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined') if (!fx) fx = env.browser ? new fxImage.GLImageFilter() : null; // && (typeof document !== 'undefined')
env.filter = !!fx; env.filter = !!fx;
if (!fx) return { tensor: null, canvas: inCanvas }; if (!fx) return { tensor: null, canvas: inCanvas };
fx.reset(); fx.reset();

View File

@ -4,6 +4,7 @@
*/ */
import * as shaders from './imagefxshaders'; import * as shaders from './imagefxshaders';
import { canvas } from './image';
const collect = (source, prefix, collection) => { const collect = (source, prefix, collection) => {
const r = new RegExp('\\b' + prefix + ' \\w+ (\\w+)', 'ig'); const r = new RegExp('\\b' + prefix + ' \\w+ (\\w+)', 'ig');
@ -51,9 +52,9 @@ class GLProgram {
* @property {function} add add specified filter to filter chain * @property {function} add add specified filter to filter chain
* @property {function} apply execute filter chain and draw result * @property {function} apply execute filter chain and draw result
* @property {function} draw just draw input to result * @property {function} draw just draw input to result
* @param {HTMLCanvasElement | OffscreenCanvas} canvas use specific canvas for all webgl bindings
*/ */
export function GLImageFilter(params = {}) {
export function GLImageFilter() {
let drawCount = 0; let drawCount = 0;
let sourceTexture: WebGLTexture | null = null; let sourceTexture: WebGLTexture | null = null;
let lastInChain = false; let lastInChain = false;
@ -62,16 +63,16 @@ export function GLImageFilter(params = {}) {
let filterChain: Record<string, unknown>[] = []; let filterChain: Record<string, unknown>[] = [];
let vertexBuffer: WebGLBuffer | null = null; let vertexBuffer: WebGLBuffer | null = null;
let currentProgram: GLProgram | null = null; let currentProgram: GLProgram | null = null;
const canvas = params['canvas'] || typeof OffscreenCanvas !== 'undefined' ? new OffscreenCanvas(100, 100) : document.createElement('canvas'); const fxcanvas = canvas(100, 100);
const shaderProgramCache = { }; // key is the shader program source, value is the compiled program const shaderProgramCache = { }; // key is the shader program source, value is the compiled program
const DRAW = { INTERMEDIATE: 1 }; const DRAW = { INTERMEDIATE: 1 };
const gl = canvas.getContext('webgl') as WebGLRenderingContext; const gl = fxcanvas.getContext('webgl') as WebGLRenderingContext;
if (!gl) throw new Error('filter: cannot get webgl context'); if (!gl) throw new Error('filter: cannot get webgl context');
function resize(width, height) { function resize(width, height) {
if (width === canvas.width && height === canvas.height) return; // Same width/height? Nothing to do here if (width === fxcanvas.width && height === fxcanvas.height) return; // Same width/height? Nothing to do here
canvas.width = width; fxcanvas.width = width;
canvas.height = height; fxcanvas.height = height;
if (!vertexBuffer) { // Create the context if we don't have it yet if (!vertexBuffer) { // Create the context if we don't have it yet
const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]); // Create the vertex buffer for the two triangles [x, y, u, v] * 6 const vertices = new Float32Array([-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0]); // Create the vertex buffer for the two triangles [x, y, u, v] * 6
vertexBuffer = gl.createBuffer(); vertexBuffer = gl.createBuffer();
@ -79,7 +80,7 @@ export function GLImageFilter(params = {}) {
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW); gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true); gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
} }
gl.viewport(0, 0, canvas.width, canvas.height); gl.viewport(0, 0, fxcanvas.width, fxcanvas.height);
tempFramebuffers = [null, null]; // Delete old temp framebuffers tempFramebuffers = [null, null]; // Delete old temp framebuffers
} }
@ -102,7 +103,7 @@ export function GLImageFilter(params = {}) {
} }
function getTempFramebuffer(index) { function getTempFramebuffer(index) {
tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(canvas.width, canvas.height); tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(fxcanvas.width, fxcanvas.height);
return tempFramebuffers[index]; return tempFramebuffers[index];
} }
@ -288,8 +289,8 @@ export function GLImageFilter(params = {}) {
convolution: (matrix) => { // general convolution Filter convolution: (matrix) => { // general convolution Filter
const m = new Float32Array(matrix); const m = new Float32Array(matrix);
const pixelSizeX = 1 / canvas.width; const pixelSizeX = 1 / fxcanvas.width;
const pixelSizeY = 1 / canvas.height; const pixelSizeY = 1 / fxcanvas.height;
const program = compileShader(shaders.convolution); const program = compileShader(shaders.convolution);
gl.uniform1fv(program?.uniform['m'], m); gl.uniform1fv(program?.uniform['m'], m);
gl.uniform2f(program?.uniform['px'], pixelSizeX, pixelSizeY); gl.uniform2f(program?.uniform['px'], pixelSizeX, pixelSizeY);
@ -344,8 +345,8 @@ export function GLImageFilter(params = {}) {
}, },
blur: (size) => { blur: (size) => {
const blurSizeX = (size / 7) / canvas.width; const blurSizeX = (size / 7) / fxcanvas.width;
const blurSizeY = (size / 7) / canvas.height; const blurSizeY = (size / 7) / fxcanvas.height;
const program = compileShader(shaders.blur); const program = compileShader(shaders.blur);
// Vertical // Vertical
gl.uniform2f(program?.uniform['px'], 0, blurSizeY); gl.uniform2f(program?.uniform['px'], 0, blurSizeY);
@ -356,8 +357,8 @@ export function GLImageFilter(params = {}) {
}, },
pixelate: (size) => { pixelate: (size) => {
const blurSizeX = (size) / canvas.width; const blurSizeX = (size) / fxcanvas.width;
const blurSizeY = (size) / canvas.height; const blurSizeY = (size) / fxcanvas.height;
const program = compileShader(shaders.pixelate); const program = compileShader(shaders.pixelate);
gl.uniform2f(program?.uniform['size'], blurSizeX, blurSizeY); gl.uniform2f(program?.uniform['size'], blurSizeX, blurSizeY);
draw(); draw();
@ -399,7 +400,7 @@ export function GLImageFilter(params = {}) {
// @ts-ignore function assigment // @ts-ignore function assigment
f.func.apply(this, f.args || []); f.func.apply(this, f.args || []);
} }
return canvas; return fxcanvas;
}; };
// @ts-ignore this // @ts-ignore this

View File

@ -1,13 +1,13 @@
/** TFJS backend initialization and customization */ /** TFJS backend initialization and customization */
import { log, now } from '../util/util'; import { log, now } from '../util/util';
import { env } from '../util/env';
import * as humangl from './humangl'; import * as humangl from './humangl';
import * as env from '../util/env';
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm.js';
export async function check(instance, force = false) { export async function check(instance, force = false) {
instance.state = 'backend'; instance.state = 'backend';
if (force || env.env.initial || (instance.config.backend && (instance.config.backend.length > 0) && (tf.getBackend() !== instance.config.backend))) { if (force || env.initial || (instance.config.backend && (instance.config.backend.length > 0) && (tf.getBackend() !== instance.config.backend))) {
const timeStamp = now(); const timeStamp = now();
if (instance.config.backend && instance.config.backend.length > 0) { if (instance.config.backend && instance.config.backend.length > 0) {
@ -18,17 +18,17 @@ export async function check(instance, force = false) {
} }
// force browser vs node backend // force browser vs node backend
if (env.env.browser && instance.config.backend === 'tensorflow') { if (env.browser && instance.config.backend === 'tensorflow') {
if (instance.config.debug) log('override: backend set to tensorflow while running in browser'); if (instance.config.debug) log('override: backend set to tensorflow while running in browser');
instance.config.backend = 'humangl'; instance.config.backend = 'humangl';
} }
if (env.env.node && (instance.config.backend === 'webgl' || instance.config.backend === 'humangl')) { if (env.node && (instance.config.backend === 'webgl' || instance.config.backend === 'humangl')) {
if (instance.config.debug) log(`override: backend set to ${instance.config.backend} while running in nodejs`); if (instance.config.debug) log(`override: backend set to ${instance.config.backend} while running in nodejs`);
instance.config.backend = 'tensorflow'; instance.config.backend = 'tensorflow';
} }
// handle webgpu // handle webgpu
if (env.env.browser && instance.config.backend === 'webgpu') { if (env.browser && instance.config.backend === 'webgpu') {
if (typeof navigator === 'undefined' || typeof navigator['gpu'] === 'undefined') { if (typeof navigator === 'undefined' || typeof navigator['gpu'] === 'undefined') {
log('override: backend set to webgpu but browser does not support webgpu'); log('override: backend set to webgpu but browser does not support webgpu');
instance.config.backend = 'humangl'; instance.config.backend = 'humangl';
@ -45,7 +45,7 @@ export async function check(instance, force = false) {
if (!available.includes(instance.config.backend)) { if (!available.includes(instance.config.backend)) {
log(`error: backend ${instance.config.backend} not found in registry`); log(`error: backend ${instance.config.backend} not found in registry`);
instance.config.backend = env.env.node ? 'tensorflow' : 'webgl'; instance.config.backend = env.node ? 'tensorflow' : 'webgl';
if (instance.config.debug) log(`override: setting backend ${instance.config.backend}`); if (instance.config.debug) log(`override: setting backend ${instance.config.backend}`);
} }
@ -75,7 +75,7 @@ export async function check(instance, force = false) {
if (tf.getBackend() === 'humangl') { if (tf.getBackend() === 'humangl') {
tf.ENV.set('CHECK_COMPUTATION_FOR_ERRORS', false); tf.ENV.set('CHECK_COMPUTATION_FOR_ERRORS', false);
tf.ENV.set('WEBGL_CPU_FORWARD', true); tf.ENV.set('WEBGL_CPU_FORWARD', true);
tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', false); // tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', false);
tf.ENV.set('WEBGL_USE_SHAPES_UNIFORMS', true); tf.ENV.set('WEBGL_USE_SHAPES_UNIFORMS', true);
tf.ENV.set('CPU_HANDOFF_SIZE_THRESHOLD', 256); tf.ENV.set('CPU_HANDOFF_SIZE_THRESHOLD', 256);
// if (!instance.config.object.enabled) tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true); // safe to use 16bit precision // if (!instance.config.object.enabled) tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true); // safe to use 16bit precision
@ -91,9 +91,9 @@ export async function check(instance, force = false) {
// customize webgpu // customize webgpu
if (tf.getBackend() === 'webgpu') { if (tf.getBackend() === 'webgpu') {
tf.ENV.set('WEBGPU_CPU_HANDOFF_SIZE_THRESHOLD', 512); // tf.ENV.set('WEBGPU_CPU_HANDOFF_SIZE_THRESHOLD', 512);
tf.ENV.set('WEBGPU_DEFERRED_SUBMIT_BATCH_SIZE', 0); // tf.ENV.set('WEBGPU_DEFERRED_SUBMIT_BATCH_SIZE', 0);
tf.ENV.set('WEBGPU_CPU_FORWARD', true); // tf.ENV.set('WEBGPU_CPU_FORWARD', true);
} }
// wait for ready // wait for ready
@ -102,8 +102,7 @@ export async function check(instance, force = false) {
instance.performance.backend = Math.trunc(now() - timeStamp); instance.performance.backend = Math.trunc(now() - timeStamp);
instance.config.backend = tf.getBackend(); instance.config.backend = tf.getBackend();
env.get(); // update env on backend init env.updateBackend(); // update env on backend init
instance.env = env.env;
} }
return true; return true;
} }
@ -121,5 +120,5 @@ export function fakeOps(kernelNames: Array<string>, config) {
}; };
tf.registerKernel(kernelConfig); tf.registerKernel(kernelConfig);
} }
env.env.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase()); // re-scan registered ops env.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase()); // re-scan registered ops
} }

View File

@ -1,189 +1,162 @@
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm.js';
import * as image from '../image/image'; import * as image from '../image/image';
import { mergeDeep } from './util';
export type Env = { /** Env class that holds detected capabilities */
export class Env {
/** Running in Browser */ /** Running in Browser */
browser: undefined | boolean, browser: boolean;
/** Running in NodeJS */ /** Running in NodeJS */
node: undefined | boolean, node: boolean;
/** Running in WebWorker thread */ /** Running in WebWorker thread */
worker: undefined | boolean, worker: boolean;
/** Detected platform */ /** Detected platform */
platform: undefined | string, platform: string = '';
/** Detected agent */ /** Detected agent */
agent: undefined | string, agent: string = '';
/** List of supported backends */ /** List of supported backends */
backends: string[], backends: string[] = [];
/** Has any work been performed so far */ /** Has any work been performed so far */
initial: boolean, initial: boolean;
/** Are image filters supported? */ /** Are image filters supported? */
filter: undefined | boolean, filter: boolean | undefined;
/** TFJS instance details */ /** TFJS instance details */
tfjs: { tfjs: {
version: undefined | string, version: undefined | string,
}, };
/** Is offscreenCanvas supported? */ /** Is offscreenCanvas supported? */
offscreen: undefined | boolean, offscreen: undefined | boolean;
/** WASM detected capabilities */ /** WASM detected capabilities */
wasm: { wasm: {
supported: undefined | boolean, supported: undefined | boolean,
backend: undefined | boolean, backend: undefined | boolean,
simd: undefined | boolean, simd: undefined | boolean,
multithread: undefined | boolean, multithread: undefined | boolean,
}, } = {
supported: undefined,
backend: undefined,
simd: undefined,
multithread: undefined,
};
/** WebGL detected capabilities */ /** WebGL detected capabilities */
webgl: { webgl: {
supported: undefined | boolean, supported: undefined | boolean,
backend: undefined | boolean, backend: undefined | boolean,
version: undefined | string, version: undefined | string,
renderer: undefined | string, renderer: undefined | string,
}, } = {
supported: undefined,
backend: undefined,
version: undefined,
renderer: undefined,
};
/** WebGPU detected capabilities */ /** WebGPU detected capabilities */
webgpu: { webgpu: {
supported: undefined | boolean, supported: undefined | boolean,
backend: undefined | boolean, backend: undefined | boolean,
adapter: undefined | string, adapter: undefined | string,
}, } = {
supported: undefined,
backend: undefined,
adapter: undefined,
};
/** CPU info */
cpu: {
model: undefined | string,
flags: string[],
} = {
model: undefined,
flags: [],
};
/** List of supported kernels for current backend */ /** List of supported kernels for current backend */
kernels: string[], kernels: string[] = [];
/** MonkeyPatch for Canvas */ /** MonkeyPatch for Canvas */
Canvas: undefined, Canvas: undefined;
/** MonkeyPatch for Image */ /** MonkeyPatch for Image */
Image: undefined, Image: undefined;
/** MonkeyPatch for ImageData */ /** MonkeyPatch for ImageData */
ImageData: undefined, ImageData: undefined;
}
// eslint-disable-next-line import/no-mutable-exports constructor() {
export let env: Env = { this.browser = typeof navigator !== 'undefined';
browser: undefined, this.node = typeof process !== 'undefined';
node: undefined, this.tfjs = { version: tf.version_core };
worker: undefined, this.offscreen = typeof OffscreenCanvas !== 'undefined';
platform: undefined, this.initial = true;
agent: undefined, // @ts-ignore WorkerGlobalScope evaluated in browser only
initial: true, this.worker = this.browser && this.offscreen ? (typeof WorkerGlobalScope !== 'undefined') : undefined;
backends: [], if (typeof navigator !== 'undefined') {
offscreen: undefined, const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
filter: undefined, if (raw && raw[0]) {
tfjs: { const platformMatch = raw[0].match(/\(([^()]+)\)/g);
version: undefined, this.platform = (platformMatch && platformMatch[0]) ? platformMatch[0].replace(/\(|\)/g, '') : '';
}, this.agent = navigator.userAgent.replace(raw[0], '');
wasm: { if (this.platform[1]) this.agent = this.agent.replace(raw[1], '');
supported: undefined, this.agent = this.agent.replace(/ /g, ' ');
backend: undefined, // chrome offscreencanvas gpu memory leak
simd: undefined, /*
multithread: undefined, const isChrome = env.agent.match(/Chrome\/.[0-9]/g);
}, const verChrome = isChrome && isChrome[0] ? isChrome[0].split('/')[1] : 0;
webgl: { if (verChrome > 92 && verChrome < 96) {
supported: undefined, log('disabling offscreenCanvas due to browser error:', isChrome ? isChrome[0] : 'unknown');
backend: undefined, this.offscreen = false;
version: undefined,
renderer: undefined,
},
webgpu: {
supported: undefined,
backend: undefined,
adapter: undefined,
},
kernels: [],
Canvas: undefined,
Image: undefined,
ImageData: undefined,
};
export async function cpuInfo() {
const cpu = { model: '', flags: [] };
if (env.node && env.platform?.startsWith('linux')) {
// eslint-disable-next-line global-require
const fs = require('fs');
try {
const data = fs.readFileSync('/proc/cpuinfo').toString();
for (const line of data.split('\n')) {
if (line.startsWith('model name')) {
cpu.model = line.match(/:(.*)/g)[0].replace(':', '').trim();
}
if (line.startsWith('flags')) {
cpu.flags = line.match(/:(.*)/g)[0].replace(':', '').trim().split(' ').sort();
} }
*/
} }
} catch { /**/ } } else if (typeof process !== 'undefined') {
} this.platform = `${process.platform} ${process.arch}`;
if (!env['cpu']) Object.defineProperty(env, 'cpu', { value: cpu }); this.agent = `NodeJS ${process.version}`;
else env['cpu'] = cpu;
}
export async function backendInfo() {
// analyze backends
env.backends = Object.keys(tf.engine().registryFactory);
env.wasm.supported = typeof WebAssembly !== 'undefined';
env.wasm.backend = env.backends.includes('wasm');
if (env.wasm.supported && env.wasm.backend && tf.getBackend() === 'wasm') {
env.wasm.simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');
env.wasm.multithread = await tf.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT');
}
const c = image.canvas(100, 100);
const ctx = c ? c.getContext('webgl2') : undefined; // causes too many gl contexts
// const ctx = typeof tf.backend().getGPGPUContext !== undefined ? tf.backend().getGPGPUContext : null;
env.webgl.supported = typeof ctx !== 'undefined';
env.webgl.backend = env.backends.includes('webgl');
if (env.webgl.supported && env.webgl.backend && (tf.getBackend() === 'webgl' || tf.getBackend() === 'humangl')) {
// @ts-ignore getGPGPUContext only exists on WebGL backend
const gl = tf.backend().gpgpu !== 'undefined' ? await tf.backend().getGPGPUContext().gl : null;
if (gl) {
env.webgl.version = gl.getParameter(gl.VERSION);
env.webgl.renderer = gl.getParameter(gl.RENDERER);
} }
} }
env.webgpu.supported = env.browser && typeof navigator['gpu'] !== 'undefined'; async updateBackend() {
env.webgpu.backend = env.backends.includes('webgpu'); // analyze backends
if (env.webgpu.supported) env.webgpu.adapter = (await navigator['gpu'].requestAdapter())?.name; this.backends = Object.keys(tf.engine().registryFactory);
this.wasm.supported = typeof WebAssembly !== 'undefined';
// enumerate kernels this.wasm.backend = this.backends.includes('wasm');
env.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase()); if (this.wasm.supported && this.wasm.backend && tf.getBackend() === 'wasm') {
} this.wasm.simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');
this.wasm.multithread = await tf.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT');
export async function get() {
env.browser = typeof navigator !== 'undefined';
env.node = typeof process !== 'undefined';
env.tfjs.version = tf.version_core;
// offscreencanvas supported?
env.offscreen = typeof env.offscreen === 'undefined' ? typeof OffscreenCanvas !== 'undefined' : env.offscreen;
// get platform and agent
if (typeof navigator !== 'undefined') {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) {
const platformMatch = raw[0].match(/\(([^()]+)\)/g);
env.platform = (platformMatch && platformMatch[0]) ? platformMatch[0].replace(/\(|\)/g, '') : '';
env.agent = navigator.userAgent.replace(raw[0], '');
if (env.platform[1]) env.agent = env.agent.replace(raw[1], '');
env.agent = env.agent.replace(/ /g, ' ');
// chrome offscreencanvas gpu memory leak
/*
const isChrome = env.agent.match(/Chrome\/.[0-9]/g);
const verChrome = isChrome && isChrome[0] ? isChrome[0].split('/')[1] : 0;
if (verChrome > 0 && verChrome > 92 && verChrome < 96) {
log('disabling offscreenCanvas due to browser error:', isChrome ? isChrome[0] : 'unknown');
env.offscreen = false;
}
*/
} }
} else if (typeof process !== 'undefined') { const c = image.canvas(100, 100);
env.platform = `${process.platform} ${process.arch}`; const ctx = c ? c.getContext('webgl2') : undefined; // causes too many gl contexts
env.agent = `NodeJS ${process.version}`; // const ctx = typeof tf.backend().getGPGPUContext !== undefined ? tf.backend().getGPGPUContext : null;
this.webgl.supported = typeof ctx !== 'undefined';
this.webgl.backend = this.backends.includes('webgl');
if (this.webgl.supported && this.webgl.backend && (tf.getBackend() === 'webgl' || tf.getBackend() === 'humangl')) {
// @ts-ignore getGPGPUContext only exists on WebGL backend
const gl = tf.backend().gpgpu !== 'undefined' ? await tf.backend().getGPGPUContext().gl : null;
if (gl) {
this.webgl.version = gl.getParameter(gl.VERSION);
this.webgl.renderer = gl.getParameter(gl.RENDERER);
}
}
this.webgpu.supported = this.browser && typeof navigator['gpu'] !== 'undefined';
this.webgpu.backend = this.backends.includes('webgpu');
if (this.webgpu.supported) this.webgpu.adapter = (await navigator['gpu'].requestAdapter())?.name;
// enumerate kernels
this.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
} }
// @ts-ignore WorkerGlobalScope evaluated in browser only
env.worker = env.browser && env.offscreen ? (typeof WorkerGlobalScope !== 'undefined') : undefined;
await backendInfo();
// get cpu info async updateCPU() {
// await cpuInfo(); const cpu = { model: '', flags: [] };
if (this.node && this.platform?.startsWith('linux')) {
// eslint-disable-next-line global-require
const fs = require('fs');
try {
const data = fs.readFileSync('/proc/cpuinfo').toString();
for (const line of data.split('\n')) {
if (line.startsWith('model name')) {
cpu.model = line.match(/:(.*)/g)[0].replace(':', '').trim();
}
if (line.startsWith('flags')) {
cpu.flags = line.match(/:(.*)/g)[0].replace(':', '').trim().split(' ').sort();
}
}
} catch { /**/ }
}
if (!this['cpu']) Object.defineProperty(this, 'cpu', { value: cpu });
else this['cpu'] = cpu;
}
} }
export async function set(obj) { export const env = new Env();
env = mergeDeep(env, obj);
}

File diff suppressed because it is too large Load Diff

View File

@ -3,4 +3,11 @@
* @external * @external
*/ */
import * as tf from '../../tfjs/dist/tfjs.esm';
// eslint-disable-next-line import/export
export * from '../../tfjs/dist/tfjs.esm'; export * from '../../tfjs/dist/tfjs.esm';
// needs override
// eslint-disable-next-line import/export
export const version_core = tf.version['tfjs-core'];