mirror of https://github.com/vladmandic/human
add movenet-multipose workaround
parent cca0102bbc
commit c8571ad8e2
CHANGELOG.md

@@ -9,7 +9,7 @@

 ## Changelog

-### **HEAD -> main** 2023/02/10 mandic00@live.com
+### **HEAD -> main** 2023/02/13 mandic00@live.com

 - add face.detector.minsize configurable setting
 - add affectnet
TODO.md (23 lines changed)
@@ -2,21 +2,6 @@

 ## Work-in-Progress

-<hr><br>
-
-## Exploring
-
-- **Optical flow** for intelligent temporal interpolation
-  <https://docs.opencv.org/3.3.1/db/d7f/tutorial_js_lucas_kanade.html>
-- **CLAHE** advanced histogram equalization for optimization of badly lit scenes
-- **TFLite** models
-  <https://js.tensorflow.org/api_tflite/0.0.1-alpha.4/>
-- **Principal Components Analysis** for reduction of descriptor complexity
-  <https://github.com/vladmandic/human-match/blob/main/src/pca.js>
-- **Temporal guidance** for face/body segmentation
-  <https://github.com/PeterL1n/RobustVideoMatting>
-
 <hr><br>

 ## Known Issues & Limitations
@@ -31,6 +16,10 @@ No issues with default model `FaceMesh`

 `NanoDet` model is not supported when using `WASM` backend due to missing kernel op in **TFJS**
 No issues with default model `MB3-CenterNet`

+## Body Detection using MoveNet-MultiPose
+
+Model does not return valid detection scores (all other functionality is not impacted)
+
 ### WebGPU

 Experimental support only until support is officially added in Chromium

@@ -47,6 +36,10 @@ Enable via `about:config` -> `gfx.offscreencanvas.enabled`

 No support for running in **web workers** as Safari still does not support `OffscreenCanvas`
 [Details](https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas#browser_compatibility)

+## React-Native
+
+`Human` support for **React-Native** is best-effort, but not part of the main development focus
+
 <hr><br>

 ## Pending Release Changes
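The new `Known Issues` entry above documents that `MoveNet-MultiPose` does not return valid per-body detection scores. As a minimal sketch of how a consumer might cope with that on the result side, the snippet below gates on the number of resolved keypoints rather than trusting `body.score`. It assumes the standard `Human` result shape (`result.body[n].score`, `result.body[n].keypoints`); the `movenet-multipose.json` model path is a hypothetical example, not taken from this commit.

```ts
import { Human, type Config } from '@vladmandic/human';

// hypothetical config: enable body detection with the multipose model
const config: Partial<Config> = {
  body: { enabled: true, modelPath: 'movenet-multipose.json' }, // model path is an assumption
};
const human = new Human(config);

async function detectBodies(input: HTMLVideoElement) {
  const result = await human.detect(input);
  for (const body of result.body) {
    // body.score may not be meaningful with movenet-multipose,
    // so gate on how many keypoints were actually resolved instead
    const usable = body.keypoints.length >= 9; // at least half of the 17 COCO keypoints
    console.log('body', body.id, 'score:', body.score, 'keypoints:', body.keypoints.length, 'usable:', usable);
  }
}
```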
demo/typescript/index.js

@@ -4,6 +4,6 @@
 author: <https://github.com/vladmandic>'
*/
import*as m from"../../dist/human.esm.js";var v=1920,b={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!0},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},segmentation:{enabled:!1},gesture:{enabled:!0}},e=new m.Human(b);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;e.draw.options.drawPoints=!0;var a={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},s={detectFPS:0,drawFPS:0,frames:0,averageMs:0},o=(...t)=>{a.log.innerText+=t.join(" ")+`
import*as m from"../../dist/human.esm.js";var v=1920,b={modelBasePath:"../../models",cacheSensitivity:0,filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!0},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},segmentation:{enabled:!1},gesture:{enabled:!0}},e=new m.Human(b);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;e.draw.options.drawPoints=!0;var t={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},s={detectFPS:0,drawFPS:0,frames:0,averageMs:0},o=(...a)=>{t.log.innerText+=a.join(" ")+`
`,console.log(...t)},r=t=>a.fps.innerText=t,g=t=>a.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(t).replace(/"|{|}/g,"").replace(/,/g," | ");async function f(){if(!a.video.paused){n.start===0&&(n.start=e.now()),await e.detect(a.video);let t=e.tf.memory().numTensors;t-n.tensors!==0&&o("allocated tensors:",t-n.tensors),n.tensors=t,s.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,s.frames++,s.averageMs=Math.round(1e3*(e.now()-n.start)/s.frames)/1e3,s.frames%100===0&&!a.video.paused&&o("performance",{...s,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(f)}async function u(){var d,i,c;if(!a.video.paused){let l=e.next(e.result),p=await e.image(a.video);e.draw.canvas(p.canvas,a.canvas);let w={bodyLabels:`person confidence [score] and ${(c=(i=(d=e.result)==null?void 0:d.body)==null?void 0:i[0])==null?void 0:c.keypoints.length} keypoints`};await e.draw.all(a.canvas,l,w),g(l.performance)}let t=e.now();s.drawFPS=Math.round(1e3*1e3/(t-n.draw))/1e3,n.draw=t,r(a.video.paused?"paused":`fps: ${s.detectFPS.toFixed(1).padStart(5," ")} detect | ${s.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(u,30)}async function y(){let d=(await e.webcam.enumerate())[0].deviceId;await e.webcam.start({element:a.video,crop:!1,width:v,id:d}),a.canvas.width=e.webcam.width,a.canvas.height=e.webcam.height,a.canvas.onclick=async()=>{e.webcam.paused?await e.webcam.play():e.webcam.pause()}}async function h(){o("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),o("platform:",e.env.platform,"| agent:",e.env.agent),r("loading..."),await e.load(),o("backend:",e.tf.getBackend(),"| available:",e.env.backends),o("models stats:",e.models.stats()),o("models loaded:",e.models.loaded()),o("environment",e.env),r("initializing..."),await e.warmup(),await y(),await f(),await u()}window.onload=h;
`,console.log(...a)},r=a=>t.fps.innerText=a,g=a=>t.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(a).replace(/"|{|}/g,"").replace(/,/g," | ");async function f(){if(!t.video.paused){n.start===0&&(n.start=e.now()),await e.detect(t.video);let a=e.tf.memory().numTensors;a-n.tensors!==0&&o("allocated tensors:",a-n.tensors),n.tensors=a,s.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,s.frames++,s.averageMs=Math.round(1e3*(e.now()-n.start)/s.frames)/1e3,s.frames%100===0&&!t.video.paused&&o("performance",{...s,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(f)}async function u(){var d,i,c;if(!t.video.paused){let l=e.next(e.result),p=await e.image(t.video);e.draw.canvas(p.canvas,t.canvas);let w={bodyLabels:`person confidence [score] and ${(c=(i=(d=e.result)==null?void 0:d.body)==null?void 0:i[0])==null?void 0:c.keypoints.length} keypoints`};await e.draw.all(t.canvas,l,w),g(l.performance)}let a=e.now();s.drawFPS=Math.round(1e3*1e3/(a-n.draw))/1e3,n.draw=a,r(t.video.paused?"paused":`fps: ${s.detectFPS.toFixed(1).padStart(5," ")} detect | ${s.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(u,30)}async function y(){let d=(await e.webcam.enumerate())[0].deviceId;await e.webcam.start({element:t.video,crop:!1,width:v,id:d}),t.canvas.width=e.webcam.width,t.canvas.height=e.webcam.height,t.canvas.onclick=async()=>{e.webcam.paused?await e.webcam.play():e.webcam.pause()}}async function h(){o("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),o("platform:",e.env.platform,"| agent:",e.env.agent),r("loading..."),await e.load(),o("backend:",e.tf.getBackend(),"| available:",e.env.backends),o("models stats:",e.models.stats()),o("models loaded:",e.models.loaded()),o("environment",e.env),r("initializing..."),await e.warmup(),await y(),await f(),await u()}window.onload=h;
//# sourceMappingURL=index.js.map
File diff suppressed because one or more lines are too long
demo/typescript/index.ts

@@ -14,13 +14,11 @@ const width = 1920; // used by webcam config as well as human maximum resultion
 const humanConfig: Partial<H.Config> = { // user configuration for human, used to fine-tune behavior
   // backend: 'webgpu',
   modelBasePath: '../../models',
+  cacheSensitivity: 0,
   filter: { enabled: true, equalization: false, flip: false },
   face: { enabled: true, detector: { rotation: true }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true }, antispoof: { enabled: true }, liveness: { enabled: true } },
   body: { enabled: true },
   hand: { enabled: true },
-  // face: { enabled: true, mesh: { enabled: false } },
-  // body: { enabled: false },
-  // hand: { enabled: false },
   object: { enabled: false },
   segmentation: { enabled: false },
   gesture: { enabled: true },
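The only functional change in the demo source is the new `cacheSensitivity: 0` entry; the three removed lines were already commented out. In `Human`, `cacheSensitivity` controls how much the input must change before cached results are invalidated, and `0` is documented as disabling that caching so every frame is fully re-detected. A minimal sketch of the relevant part of the config, assuming that documented behavior:

```ts
import type { Config } from '@vladmandic/human';

const humanConfig: Partial<Config> = {
  modelBasePath: '../../models',
  cacheSensitivity: 0, // 0 disables frame-similarity caching, forcing full detection on every frame
  body: { enabled: true },
};
```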
File diff suppressed because one or more lines are too long
@@ -44266,6 +44266,7 @@ function parseSinglePose(res, config3, image) {
   return bodies;
 }
 function parseMultiPose(res, config3, image) {
+  config3.body.minConfidence = -1;
   const bodies = [];
   for (let id2 = 0; id2 < res[0].length; id2++) {
     const kpt4 = res[0][id2];
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
src/body/movenet.ts

@@ -81,6 +81,7 @@ function parseSinglePose(res, config, image) {
 }

 function parseMultiPose(res, config, image) {
+  config.body.minConfidence = -1; // movenet-multipose return incorrect scores
   const bodies: BodyResult[] = [];
   for (let id = 0; id < res[0].length; id++) {
     const kpt = res[0][id];
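The workaround itself is the single added line above: forcing `config.body.minConfidence` to `-1` inside `parseMultiPose` so candidate bodies are never discarded on the basis of the model's unreliable scores. A sketch (not the library's implementation) of the complementary idea of deriving a usable score from keypoint confidences; the `Keypoint` and `RawBody` types are illustrative only, not `Human` internals:

```ts
interface Keypoint { score: number; x: number; y: number }
interface RawBody { score: number; keypoints: Keypoint[] }

// derive a body score from keypoint confidences instead of trusting the model's reported score
function scoreFromKeypoints(body: RawBody, kptThreshold = 0.3): number {
  const visible = body.keypoints.filter((kpt) => kpt.score > kptThreshold);
  if (visible.length === 0) return 0;
  return visible.reduce((sum, kpt) => sum + kpt.score, 0) / visible.length;
}

// keep a body if enough keypoints are confident, regardless of its reported score
function filterBodies(bodies: RawBody[], minKeypoints = 9): RawBody[] {
  return bodies
    .map((body) => ({ ...body, score: scoreFromKeypoints(body) }))
    .filter((body) => body.keypoints.filter((kpt) => kpt.score > 0.3).length >= minKeypoints);
}
```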
@@ -135,39 +136,6 @@ export async function predict(input: Tensor, config: Config): Promise<BodyResult
   return new Promise(async (resolve) => {
     const t: Record<string, Tensor> = {};
     skipped = 0;
-    // run detection on squared input and cached boxes
-    /*
-    cache.bodies = []; // reset bodies result
-    if (cache.boxes.length >= (config.body.maxDetected || 0)) { // if we have enough cached boxes run detection using cache
-      for (let i = 0; i < cache.boxes.length; i++) { // run detection based on cached boxes
-        t.crop = tf.image.cropAndResize(input, [cache.boxes[i]], [0], [inputSize, inputSize], 'bilinear');
-        t.cast = tf.cast(t.crop, 'int32');
-        // t.input = prepareImage(input);
-        t.res = model?.execute(t.cast) as Tensor;
-        const res = await t.res.array();
-        const newBodies = (t.res.shape[2] === 17) ? await parseSinglePose(res, config, input, cache.boxes[i]) : await parseMultiPose(res, config, input, cache.boxes[i]);
-        cache.bodies = cache.bodies.concat(newBodies);
-        Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
-      }
-    }
-    if (cache.bodies.length !== config.body.maxDetected) { // did not find enough bodies based on cached boxes so run detection on full frame
-      t.input = prepareImage(input);
-      t.res = model?.execute(t.input) as Tensor;
-      const res = await t.res.array();
-      cache.bodies = (t.res.shape[2] === 17) ? await parseSinglePose(res, config, input, [0, 0, 1, 1]) : await parseMultiPose(res, config, input, [0, 0, 1, 1]);
-      for (const body of cache.bodies) rescaleBody(body, [input.shape[2] || 1, input.shape[1] || 1]);
-      Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
-    }
-    cache.boxes.length = 0; // reset cache
-    for (let i = 0; i < cache.bodies.length; i++) {
-      if (cache.bodies[i].keypoints.length > (coords.kpt.length / 2)) { // only update cache if we detected at least half keypoints
-        const scaledBox = box.scale(cache.bodies[i].boxRaw, boxExpandFact);
-        const cropBox = box.crop(scaledBox);
-        cache.boxes.push(cropBox);
-      }
-    }
-    */
-
     // run detection on squared input and no cached boxes
     t.input = fix.padInput(input, inputSize);
     t.res = model?.execute(t.input) as Tensor;
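The large block removed from `predict` was already commented out: an experiment that re-ran detection on cached bounding boxes via `tf.image.cropAndResize` before falling back to a full-frame pass. What remains is the simpler path of padding the input to a square and running the model on the whole frame on each call. A generic tfjs sketch of that pad-to-square step; `fix.padInput` is an internal helper, so its exact behavior is assumed here rather than copied:

```ts
import * as tf from '@tensorflow/tfjs';

// pad a [batch, height, width, channels] image tensor to a square, resize to the model
// input size, and cast to int32 (the movenet models take int32 input)
function padToSquare(input: tf.Tensor4D, targetSize: number): tf.Tensor4D {
  const [, height, width] = input.shape;
  const size = Math.max(height, width);
  const padded = tf.pad(input, [
    [0, 0],
    [Math.floor((size - height) / 2), Math.ceil((size - height) / 2)],
    [Math.floor((size - width) / 2), Math.ceil((size - width) / 2)],
    [0, 0],
  ]) as tf.Tensor4D;
  const resized = tf.image.resizeBilinear(padded, [targetSize, targetSize]);
  const cast = tf.cast(resized, 'int32') as tf.Tensor4D;
  tf.dispose([padded, resized]); // free intermediates; caller owns the returned tensor
  return cast;
}
```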
test/build.log (100 lines changed)

@@ -1,50 +1,50 @@
-2023-02-13 06:23:54 DATA: Build {"name":"@vladmandic/human","version":"3.0.5"}
+2023-02-13 10:14:55 DATA: Build {"name":"@vladmandic/human","version":"3.0.5"}
-2023-02-13 06:23:54 INFO: Application: {"name":"@vladmandic/human","version":"3.0.5"}
+2023-02-13 10:14:55 INFO: Application: {"name":"@vladmandic/human","version":"3.0.5"}
-2023-02-13 06:23:54 INFO: Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
+2023-02-13 10:14:55 INFO: Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
-2023-02-13 06:23:54 INFO: Toolchain: {"build":"0.8.2","esbuild":"0.17.8","typescript":"4.9.5","typedoc":"0.23.25","eslint":"8.34.0"}
+2023-02-13 10:14:55 INFO: Toolchain: {"build":"0.8.2","esbuild":"0.17.8","typescript":"4.9.5","typedoc":"0.23.25","eslint":"8.34.0"}
-2023-02-13 06:23:54 INFO: Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
+2023-02-13 10:14:55 INFO: Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
-2023-02-13 06:23:54 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
+2023-02-13 10:14:55 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
-2023-02-13 06:23:54 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":361}
+2023-02-13 10:14:55 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":361}
-2023-02-13 06:23:54 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":569,"outputBytes":924}
+2023-02-13 10:14:55 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":569,"outputBytes":924}
-2023-02-13 06:23:54 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":80,"inputBytes":674249,"outputBytes":318951}
+2023-02-13 10:14:55 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":80,"inputBytes":672409,"outputBytes":318975}
-2023-02-13 06:23:54 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":577,"outputBytes":928}
+2023-02-13 10:14:55 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":577,"outputBytes":928}
-2023-02-13 06:23:54 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":80,"inputBytes":674253,"outputBytes":318955}
+2023-02-13 10:14:55 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":80,"inputBytes":672413,"outputBytes":318979}
-2023-02-13 06:23:54 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":665,"outputBytes":1876}
+2023-02-13 10:14:55 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":665,"outputBytes":1876}
-2023-02-13 06:23:54 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":80,"inputBytes":675201,"outputBytes":319066}
+2023-02-13 10:14:55 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":80,"inputBytes":673361,"outputBytes":319090}
-2023-02-13 06:23:54 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1375,"outputBytes":670}
+2023-02-13 10:14:55 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1375,"outputBytes":670}
-2023-02-13 06:23:54 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":80,"inputBytes":673995,"outputBytes":317523}
+2023-02-13 10:14:55 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":80,"inputBytes":672155,"outputBytes":317547}
-2023-02-13 06:23:54 STATE: Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":1375,"outputBytes":1151306}
+2023-02-13 10:14:55 STATE: Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":1375,"outputBytes":1151306}
-2023-02-13 06:23:54 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":80,"inputBytes":1824631,"outputBytes":1464766}
+2023-02-13 10:14:55 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":80,"inputBytes":1822791,"outputBytes":1464790}
-2023-02-13 06:23:54 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":80,"inputBytes":1824631,"outputBytes":1931691}
+2023-02-13 10:14:55 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":80,"inputBytes":1822791,"outputBytes":1931726}
-2023-02-13 06:23:58 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":15}
+2023-02-13 10:15:00 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":15}
-2023-02-13 06:24:01 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":81,"generated":true}
+2023-02-13 10:15:02 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":81,"generated":true}
-2023-02-13 06:24:01 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6235,"outputBytes":2901}
+2023-02-13 10:15:02 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6140,"outputBytes":2920}
-2023-02-13 06:24:01 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17503,"outputBytes":9403}
+2023-02-13 10:15:02 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17503,"outputBytes":9403}
-2023-02-13 06:24:11 STATE: Lint: {"locations":["**/*.json","src/**/*.ts","test/**/*.js","demo/**/*.js","**/*.md"],"files":170,"errors":0,"warnings":0}
+2023-02-13 10:15:12 STATE: Lint: {"locations":["**/*.json","src/**/*.ts","test/**/*.js","demo/**/*.js","**/*.md"],"files":170,"errors":0,"warnings":0}
-2023-02-13 06:24:11 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
+2023-02-13 10:15:12 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
-2023-02-13 06:24:11 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs-core.d.ts","output":"types/tfjs-core.d.ts"}
+2023-02-13 10:15:12 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs-core.d.ts","output":"types/tfjs-core.d.ts"}
-2023-02-13 06:24:11 INFO: Done...
+2023-02-13 10:15:12 INFO: Done...
-2023-02-13 06:24:11 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs.d.ts","output":"types/tfjs.esm.d.ts"}
+2023-02-13 10:15:13 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs.d.ts","output":"types/tfjs.esm.d.ts"}
-2023-02-13 06:24:11 STATE: Copy: {"input":"src/types/tsconfig.json","output":"types/tsconfig.json"}
+2023-02-13 10:15:13 STATE: Copy: {"input":"src/types/tsconfig.json","output":"types/tsconfig.json"}
-2023-02-13 06:24:11 STATE: Copy: {"input":"src/types/eslint.json","output":"types/.eslintrc.json"}
+2023-02-13 10:15:13 STATE: Copy: {"input":"src/types/eslint.json","output":"types/.eslintrc.json"}
-2023-02-13 06:24:11 STATE: Copy: {"input":"src/types/tfjs.esm.d.ts","output":"dist/tfjs.esm.d.ts"}
+2023-02-13 10:15:13 STATE: Copy: {"input":"src/types/tfjs.esm.d.ts","output":"dist/tfjs.esm.d.ts"}
-2023-02-13 06:24:11 STATE: Filter: {"input":"types/tfjs-core.d.ts"}
+2023-02-13 10:15:13 STATE: Filter: {"input":"types/tfjs-core.d.ts"}
-2023-02-13 06:24:12 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":210}
+2023-02-13 10:15:14 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":210}
-2023-02-13 06:24:12 STATE: Filter: {"input":"types/human.d.ts"}
+2023-02-13 10:15:14 STATE: Filter: {"input":"types/human.d.ts"}
-2023-02-13 06:24:12 STATE: Write: {"output":"dist/human.esm-nobundle.d.ts"}
+2023-02-13 10:15:14 STATE: Write: {"output":"dist/human.esm-nobundle.d.ts"}
-2023-02-13 06:24:12 STATE: Write: {"output":"dist/human.esm.d.ts"}
+2023-02-13 10:15:14 STATE: Write: {"output":"dist/human.esm.d.ts"}
-2023-02-13 06:24:12 STATE: Write: {"output":"dist/human.d.ts"}
+2023-02-13 10:15:14 STATE: Write: {"output":"dist/human.d.ts"}
-2023-02-13 06:24:12 STATE: Write: {"output":"dist/human.node-gpu.d.ts"}
+2023-02-13 10:15:14 STATE: Write: {"output":"dist/human.node-gpu.d.ts"}
-2023-02-13 06:24:12 STATE: Write: {"output":"dist/human.node.d.ts"}
+2023-02-13 10:15:14 STATE: Write: {"output":"dist/human.node.d.ts"}
-2023-02-13 06:24:12 STATE: Write: {"output":"dist/human.node-wasm.d.ts"}
+2023-02-13 10:15:14 STATE: Write: {"output":"dist/human.node-wasm.d.ts"}
-2023-02-13 06:24:12 INFO: Analyze models: {"folders":8,"result":"models/models.json"}
+2023-02-13 10:15:14 INFO: Analyze models: {"folders":8,"result":"models/models.json"}
-2023-02-13 06:24:12 STATE: Models {"folder":"./models","models":12}
+2023-02-13 10:15:14 STATE: Models {"folder":"./models","models":12}
-2023-02-13 06:24:12 STATE: Models {"folder":"../human-models/models","models":44}
+2023-02-13 10:15:14 STATE: Models {"folder":"../human-models/models","models":44}
-2023-02-13 06:24:12 STATE: Models {"folder":"../blazepose/model/","models":4}
+2023-02-13 10:15:14 STATE: Models {"folder":"../blazepose/model/","models":4}
-2023-02-13 06:24:12 STATE: Models {"folder":"../anti-spoofing/model","models":1}
+2023-02-13 10:15:14 STATE: Models {"folder":"../anti-spoofing/model","models":1}
-2023-02-13 06:24:12 STATE: Models {"folder":"../efficientpose/models","models":3}
+2023-02-13 10:15:14 STATE: Models {"folder":"../efficientpose/models","models":3}
-2023-02-13 06:24:12 STATE: Models {"folder":"../insightface/models","models":5}
+2023-02-13 10:15:14 STATE: Models {"folder":"../insightface/models","models":5}
-2023-02-13 06:24:12 STATE: Models {"folder":"../movenet/models","models":3}
+2023-02-13 10:15:14 STATE: Models {"folder":"../movenet/models","models":3}
-2023-02-13 06:24:12 STATE: Models {"folder":"../nanodet/models","models":4}
+2023-02-13 10:15:14 STATE: Models {"folder":"../nanodet/models","models":4}
-2023-02-13 06:24:13 STATE: Models: {"count":58,"totalSize":380063249}
+2023-02-13 10:15:15 STATE: Models: {"count":58,"totalSize":380063249}
-2023-02-13 06:24:13 INFO: Human Build complete... {"logFile":"test/build.log"}
+2023-02-13 10:15:15 INFO: Human Build complete... {"logFile":"test/build.log"}