mirror of https://github.com/vladmandic/human

commit 4e418a803c (parent 009af80f1d): added webcam id specification
README.md
@@ -427,7 +427,7 @@ For more info, see [**Configuration Details**](https://github.com/vladmandic/hum

 <br><hr><br>

-`Human` library is written in [TypeScript](https://www.typescriptlang.org/docs/handbook/intro.html) **4.8** using [TensorFlow/JS](https://www.tensorflow.org/js/) **4.0** and conforming to latest `JavaScript` [ECMAScript version 2022](https://262.ecma-international.org/) standard
+`Human` library is written in [TypeScript](https://www.typescriptlang.org/docs/handbook/intro.html) **4.9** using [TensorFlow/JS](https://www.tensorflow.org/js/) **4.0** and conforming to latest `JavaScript` [ECMAScript version 2022](https://262.ecma-international.org/) standard

 Build target for distributables is `JavaScript` [ECMAScript version 2018](https://262.ecma-international.org/9.0/)

TODO.md
@@ -67,13 +67,17 @@ Features:
   Which significantly increases performance and precision in poorly lit scenes
 - Improved `config.filter.equalization` (*disabled by default*)
   Image and video on-demand histogram equalization
+- Support selecting specific video source when multiple cameras are present
+  See `human.webcam.enumerate()`

 Architecture:
 - Reduce build dependencies
   `Human` is now 30% smaller :)
   As usual, `Human` has **zero** runtime dependencies,
   all *devDependencies* are only to rebuild `Human` itself
-- Upgrade to TFJS 4.0 with **strong typing**
+- Upgrade to **TFJS 4.0** with **strong typing**
   see [notes](https://github.com/vladmandic/human#typedefs) on how to use
 - `TypeDef` refactoring
 - Add named export for improved bundler support when using non-default imports
+- Support for `NodeJS` v19
+- Upgrade to **TypeScript 4.9**
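Taken together, the webcam additions can be driven entirely from the `human.webcam` helper. A minimal sketch of the new flow (the bundle path `../dist/human.esm.js` and the element id are assumptions, following the bundled demos):

```ts
import * as H from '../dist/human.esm.js'; // path assumed, as in the bundled demos

const human = new H.Human({ modelBasePath: '../models' });
const video = document.getElementById('video') as HTMLVideoElement;

async function startCamera() {
  const devices = await human.webcam.enumerate(); // video-input MediaDeviceInfo[] entries
  const id = devices[0]?.deviceId; // pick the first available camera; any enumerated deviceId works
  await human.webcam.start({ element: video, crop: true, width: 1920, id }); // id is the new optional field
}
```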

demo/typescript/index.js
@@ -4,6 +4,6 @@
  author: <https://github.com/vladmandic>'
 */

-import*as m from"../../dist/human.esm.js";var f=1920,g={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1,width:f},face:{enabled:!0,detector:{rotation:!0},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},segmentation:{enabled:!1},gesture:{enabled:!0}},e=new m.Human(g);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var a={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},o={detectFPS:0,drawFPS:0,frames:0,averageMs:0},s=(...t)=>{a.log.innerText+=t.join(" ")+`
-`,console.log(...t)},r=t=>a.fps.innerText=t,b=t=>a.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(t).replace(/"|{|}/g,"").replace(/,/g," | ");async function u(){if(!a.video.paused){n.start===0&&(n.start=e.now()),await e.detect(a.video);let t=e.tf.memory().numTensors;t-n.tensors!==0&&s("allocated tensors:",t-n.tensors),n.tensors=t,o.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,o.frames++,o.averageMs=Math.round(1e3*(e.now()-n.start)/o.frames)/1e3,o.frames%100===0&&!a.video.paused&&s("performance",{...o,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(u)}async function p(){var d,i,l;if(!a.video.paused){let c=e.next(e.result),w=await e.image(a.video);e.draw.canvas(w.canvas,a.canvas);let v={bodyLabels:`person confidence [score] and ${(l=(i=(d=e.result)==null?void 0:d.body)==null?void 0:i[0])==null?void 0:l.keypoints.length} keypoints`};await e.draw.all(a.canvas,c,v),b(c.performance)}let t=e.now();o.drawFPS=Math.round(1e3*1e3/(t-n.draw))/1e3,n.draw=t,r(a.video.paused?"paused":`fps: ${o.detectFPS.toFixed(1).padStart(5," ")} detect | ${o.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(p,30)}async function h(){await e.webcam.start({element:a.video,crop:!0,width:f}),a.canvas.width=e.webcam.width,a.canvas.height=e.webcam.height,a.canvas.onclick=async()=>{e.webcam.paused?await e.webcam.play():e.webcam.pause()}}async function y(){s("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),s("platform:",e.env.platform,"| agent:",e.env.agent),r("loading..."),await e.load(),s("backend:",e.tf.getBackend(),"| available:",e.env.backends),s("models stats:",e.getModelStats()),s("models loaded:",Object.values(e.models).filter(t=>t!==null).length),s("environment",e.env),r("initializing..."),await e.warmup(),await h(),await u(),await p()}window.onload=y;
+import*as m from"../../dist/human.esm.js";var f=1920,b={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1,width:f},face:{enabled:!0,detector:{rotation:!0},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!0},hand:{enabled:!1},object:{enabled:!1},segmentation:{enabled:!1},gesture:{enabled:!0}},e=new m.Human(b);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var a={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},s={detectFPS:0,drawFPS:0,frames:0,averageMs:0},o=(...t)=>{a.log.innerText+=t.join(" ")+`
+`,console.log(...t)},r=t=>a.fps.innerText=t,g=t=>a.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(t).replace(/"|{|}/g,"").replace(/,/g," | ");async function u(){if(!a.video.paused){n.start===0&&(n.start=e.now()),await e.detect(a.video);let t=e.tf.memory().numTensors;t-n.tensors!==0&&o("allocated tensors:",t-n.tensors),n.tensors=t,s.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,s.frames++,s.averageMs=Math.round(1e3*(e.now()-n.start)/s.frames)/1e3,s.frames%100===0&&!a.video.paused&&o("performance",{...s,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(u)}async function p(){var d,i,c;if(!a.video.paused){let l=e.next(e.result),w=await e.image(a.video);e.draw.canvas(w.canvas,a.canvas);let v={bodyLabels:`person confidence [score] and ${(c=(i=(d=e.result)==null?void 0:d.body)==null?void 0:i[0])==null?void 0:c.keypoints.length} keypoints`};await e.draw.all(a.canvas,l,v),g(l.performance)}let t=e.now();s.drawFPS=Math.round(1e3*1e3/(t-n.draw))/1e3,n.draw=t,r(a.video.paused?"paused":`fps: ${s.detectFPS.toFixed(1).padStart(5," ")} detect | ${s.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(p,30)}async function h(){let d=(await e.webcam.enumerate())[0].deviceId;await e.webcam.start({element:a.video,crop:!0,width:f,id:d}),a.canvas.width=e.webcam.width,a.canvas.height=e.webcam.height,a.canvas.onclick=async()=>{e.webcam.paused?await e.webcam.play():e.webcam.pause()}}async function y(){o("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),o("platform:",e.env.platform,"| agent:",e.env.agent),r("loading..."),await e.load(),o("backend:",e.tf.getBackend(),"| available:",e.env.backends),o("models stats:",e.getModelStats()),o("models loaded:",Object.values(e.models).filter(t=>t!==null).length),o("environment",e.env),r("initializing..."),await e.warmup(),await h(),await u(),await p()}window.onload=y;
 //# sourceMappingURL=index.js.map
File diff suppressed because one or more lines are too long

demo/typescript/index.ts
@@ -17,7 +17,8 @@ const humanConfig: Partial<H.Config> = { // user configuration for human, used t
   filter: { enabled: true, equalization: false, flip: false, width },
   face: { enabled: true, detector: { rotation: true }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true }, antispoof: { enabled: true }, liveness: { enabled: true } },
   body: { enabled: true },
-  hand: { enabled: true },
+  // hand: { enabled: true },
+  hand: { enabled: false },
   object: { enabled: false },
   segmentation: { enabled: false },
   gesture: { enabled: true },
@@ -82,7 +83,9 @@ async function drawLoop() { // main screen refresh loop
 }

 async function webCam() {
-  await human.webcam.start({ element: dom.video, crop: true, width }); // use human webcam helper methods and associate webcam stream with a dom element
+  const devices = await human.webcam.enumerate();
+  const id = devices[0].deviceId; // use first available video source
+  await human.webcam.start({ element: dom.video, crop: true, width, id }); // use human webcam helper methods and associate webcam stream with a dom element
   dom.canvas.width = human.webcam.width;
   dom.canvas.height = human.webcam.height;
   dom.canvas.onclick = async () => { // pause when clicked on screen and resume on next click

package.json
@@ -85,13 +85,13 @@
     "@tensorflow/tfjs-node-gpu": "^4.0.0",
     "@types/node": "^18.11.9",
     "@types/offscreencanvas": "^2019.7.0",
-    "@typescript-eslint/eslint-plugin": "^5.42.1",
-    "@typescript-eslint/parser": "^5.42.1",
+    "@typescript-eslint/eslint-plugin": "^5.43.0",
+    "@typescript-eslint/parser": "^5.43.0",
     "@vladmandic/build": "^0.7.14",
     "@vladmandic/pilogger": "^0.4.6",
     "@vladmandic/tfjs": "github:vladmandic/tfjs",
     "canvas": "^2.10.2",
-    "esbuild": "^0.15.13",
+    "esbuild": "^0.15.14",
     "eslint": "8.27.0",
     "eslint-config-airbnb-base": "^15.0.0",
     "eslint-plugin-html": "^7.1.0",
@@ -101,7 +101,7 @@
     "eslint-plugin-promise": "^6.1.1",
     "rimraf": "^3.0.2",
     "tslib": "^2.4.1",
-    "typedoc": "0.23.20",
-    "typescript": "4.8.4"
+    "typedoc": "0.23.21",
+    "typescript": "4.9.3"
   }
 }

src/body/blazepose.ts
@@ -10,56 +10,38 @@ import type { BodyKeypoint, BodyResult, BodyLandmark, Box, Point, BodyAnnotation
 import type { GraphModel, Tensor, Tensor4D } from '../tfjs/types';
 import type { Config } from '../config';
 import * as coords from './blazeposecoords';
-import * as detect from './blazeposedetector';
+import { loadDetector, detectBoxes, DetectedBox } from './blazeposedetector';
 import * as box from '../util/box';
-import { env } from '../util/env';
-
-// const models: [GraphModel | null, GraphModel | null] = [null, null];
-const models: { detector: GraphModel | null, landmarks: GraphModel | null } = { detector: null, landmarks: null };
-const inputSize: { detector: [number, number], landmarks: [number, number] } = { detector: [224, 224], landmarks: [256, 256] };
+
+const env = { initial: true };
+let model: GraphModel | null;
+let inputSize = 256;
 let skipped = Number.MAX_SAFE_INTEGER;
 const outputNodes: { detector: string[], landmarks: string[] } = {
   landmarks: ['ld_3d', 'activation_segmentation', 'activation_heatmap', 'world_3d', 'output_poseflag'],
   detector: [],
 };

-let cache: BodyResult | null = null;
-let cropBox: Box | undefined;
+const cache: BodyResult[] = [];
 let padding: [number, number][] = [[0, 0], [0, 0], [0, 0], [0, 0]];
 let lastTime = 0;

 const sigmoid = (x) => (1 - (1 / (1 + Math.exp(x))));

-export async function loadDetect(config: Config): Promise<GraphModel> {
-  if (env.initial) models.detector = null;
-  if (!models.detector && config.body['detector'] && config.body['detector'].modelPath || '') {
-    models.detector = await loadModel(config.body['detector'].modelPath);
-    const inputs = models.detector?.['executor'] ? Object.values(models.detector.modelSignature['inputs']) : undefined;
-    inputSize.detector[0] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[1].size) : 0;
-    inputSize.detector[1] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : 0;
-  } else if (config.debug && models.detector) log('cached model:', models.detector['modelUrl']);
-  detect.createAnchors();
-  return models.detector as GraphModel;
-}
+export const loadDetect = (config: Config): Promise<GraphModel> => loadDetector(config);

 export async function loadPose(config: Config): Promise<GraphModel> {
-  if (env.initial) models.landmarks = null;
-  if (!models.landmarks) {
-    models.landmarks = await loadModel(config.body.modelPath);
-    const inputs = models.landmarks?.['executor'] ? Object.values(models.landmarks.modelSignature['inputs']) : undefined;
-    inputSize.landmarks[0] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[1].size) : 0;
-    inputSize.landmarks[1] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : 0;
-  } else if (config.debug) log('cached model:', models.landmarks['modelUrl']);
-  return models.landmarks;
+  if (env.initial) model = null;
+  if (!model) {
+    model = await loadModel(config.body.modelPath);
+    const inputs = model?.['executor'] ? Object.values(model.modelSignature['inputs']) : undefined;
+    inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[1].size) : 0;
+  } else if (config.debug) log('cached model:', model['modelUrl']);
+  return model;
 }

-export async function load(config: Config): Promise<[GraphModel | null, GraphModel | null]> {
-  if (!models.detector) await loadDetect(config);
-  if (!models.landmarks) await loadPose(config);
-  return [models.detector, models.landmarks];
-}
-
-function prepareImage(input: Tensor4D, size: number): Tensor {
+function prepareImage(input: Tensor4D, size: number, cropBox?: Box): Tensor {
   const t: Record<string, Tensor> = {};
   if (!input?.shape?.[1] || !input?.shape?.[2]) return input;
   let final: Tensor;
@@ -94,7 +76,7 @@ function prepareImage(input: Tensor4D, size: number): Tensor {
   return final;
 }

-function rescaleKeypoints(keypoints: BodyKeypoint[], outputSize: [number, number]): BodyKeypoint[] {
+function rescaleKeypoints(keypoints: BodyKeypoint[], outputSize: [number, number], cropBox?: Box): BodyKeypoint[] {
   for (const kpt of keypoints) { // first rescale due to padding
     kpt.position = [
       Math.trunc(kpt.position[0] * (outputSize[0] + padding[2][0] + padding[2][1]) / outputSize[0] - padding[2][0]),
@@ -104,10 +86,12 @@ function rescaleKeypoints(keypoints: BodyKeypoint[], outputSize: [number, number
     kpt.positionRaw = [kpt.position[0] / outputSize[0], kpt.position[1] / outputSize[1], 2 * (kpt.position[2] as number) / (outputSize[0] + outputSize[1])];
   }
   if (cropBox) { // second rescale due to cropping
+    const width = cropBox[2] - cropBox[0];
+    const height = cropBox[3] - cropBox[1];
     for (const kpt of keypoints) {
       kpt.positionRaw = [
-        kpt.positionRaw[0] + cropBox[1], // correct offset due to crop
-        kpt.positionRaw[1] + cropBox[0], // correct offset due to crop
+        kpt.positionRaw[0] / height + cropBox[1], // correct offset due to crop
+        kpt.positionRaw[1] / width + cropBox[0], // correct offset due to crop
         kpt.positionRaw[2] as number,
       ];
       kpt.position = [
@@ -140,9 +124,9 @@ async function detectLandmarks(input: Tensor, config: Config, outputSize: [numbe
    * t.world: 39 keypoints [x,y,z] normalized to -1..1
    * t.poseflag: body score
    */
-  if (!models.landmarks?.['executor']) return null;
+  if (!model?.['executor']) return null;
   const t: Record<string, Tensor> = {};
-  [t.ld/* 1,195(39*5) */, t.segmentation/* 1,256,256,1 */, t.heatmap/* 1,64,64,39 */, t.world/* 1,117(39*3) */, t.poseflag/* 1,1 */] = models.landmarks?.execute(input, outputNodes.landmarks) as unknown as Tensor[]; // run model
+  [t.ld/* 1,195(39*5) */, t.segmentation/* 1,256,256,1 */, t.heatmap/* 1,64,64,39 */, t.world/* 1,117(39*3) */, t.poseflag/* 1,1 */] = model?.execute(input, outputNodes.landmarks) as Tensor[]; // run model
   const poseScore = (await t.poseflag.data())[0];
   const points = await t.ld.data();
   const distances = await t.world.data();
@@ -153,7 +137,7 @@ async function detectLandmarks(input: Tensor, config: Config, outputSize: [numbe
     const score = sigmoid(points[depth * i + 3]);
     const presence = sigmoid(points[depth * i + 4]);
     const adjScore = Math.trunc(100 * score * presence * poseScore) / 100;
-    const positionRaw: Point = [points[depth * i + 0] / inputSize.landmarks[0], points[depth * i + 1] / inputSize.landmarks[1], points[depth * i + 2] + 0];
+    const positionRaw: Point = [points[depth * i + 0] / inputSize, points[depth * i + 1] / inputSize, points[depth * i + 2] + 0];
     const position: Point = [Math.trunc(outputSize[0] * positionRaw[0]), Math.trunc(outputSize[1] * positionRaw[1]), positionRaw[2] as number];
     const distance: Point = [distances[depth * i + 0], distances[depth * i + 1], distances[depth * i + 2] + 0];
     keypointsRelative.push({ part: coords.kpt[i] as BodyLandmark, positionRaw, position, distance, score: adjScore });
@@ -177,36 +161,6 @@ async function detectLandmarks(input: Tensor, config: Config, outputSize: [numbe
   return body;
 }

-/*
-interface DetectedBox { box: Box, boxRaw: Box, score: number }
-
-function rescaleBoxes(boxes: Array<DetectedBox>, outputSize: [number, number]): Array<DetectedBox> {
-  for (const b of boxes) {
-    b.box = [
-      Math.trunc(b.box[0] * (outputSize[0] + padding[2][0] + padding[2][1]) / outputSize[0]),
-      Math.trunc(b.box[1] * (outputSize[1] + padding[1][0] + padding[1][1]) / outputSize[1]),
-      Math.trunc(b.box[2] * (outputSize[0] + padding[2][0] + padding[2][1]) / outputSize[0]),
-      Math.trunc(b.box[3] * (outputSize[1] + padding[1][0] + padding[1][1]) / outputSize[1]),
-    ];
-    b.boxRaw = [b.box[0] / outputSize[0], b.box[1] / outputSize[1], b.box[2] / outputSize[0], b.box[3] / outputSize[1]];
-  }
-  return boxes;
-}
-
-async function detectBoxes(input: Tensor, config: Config, outputSize: [number, number]) {
-  const t: Record<string, Tensor> = {};
-  t.res = models.detector?.execute(input, ['Identity']) as Tensor; //
-  t.logitsRaw = tf.slice(t.res, [0, 0, 0], [1, -1, 1]);
-  t.boxesRaw = tf.slice(t.res, [0, 0, 1], [1, -1, -1]);
-  t.logits = tf.squeeze(t.logitsRaw);
-  t.boxes = tf.squeeze(t.boxesRaw);
-  const boxes = await detect.decode(t.boxes, t.logits, config, outputSize);
-  rescaleBoxes(boxes, outputSize);
-  Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
-  return boxes;
-}
-*/
-
 export async function predict(input: Tensor4D, config: Config): Promise<BodyResult[]> {
   const outputSize: [number, number] = [input.shape[2] || 0, input.shape[1] || 0];
   const skipTime = (config.body.skipTime || 0) > (now() - lastTime);
@@ -214,15 +168,24 @@ export async function predict(input: Tensor4D, config: Config): Promise<BodyResu
   if (config.skipAllowed && skipTime && skipFrame && cache !== null) {
     skipped++;
   } else {
-    const t: Record<string, Tensor> = {};
-    /*
-    if (config.body['detector'] && config.body['detector']['enabled']) {
-      t.detector = await prepareImage(input, 224);
-      const boxes = await detectBoxes(t.detector, config, outputSize);
+    let boxes: DetectedBox[] = [];
+    if (config.body?.['detector']?.['enabled']) {
+      const preparedImage = prepareImage(input, 224);
+      boxes = await detectBoxes(preparedImage, config, outputSize);
+      tf.dispose(preparedImage);
+    } else {
+      boxes = [{ box: [0, 0, 0, 0] as Box, boxRaw: [0, 0, 1, 1], score: 0 }]; // running without detector
     }
-    */
-    t.landmarks = prepareImage(input, 256); // padded and resized
-    cache = await detectLandmarks(t.landmarks, config, outputSize);
-    /*
-    cropBox = [0, 0, 1, 1]; // reset crop coordinates
-    if (cache?.boxRaw && config.skipAllowed) {
+    for (let i = 0; i < boxes.length; i++) {
+      const preparedBox = prepareImage(input, 256, boxes[i]?.boxRaw); // padded and resized
+      cache.length = 0;
+      const bodyResult = await detectLandmarks(preparedBox, config, outputSize);
+      tf.dispose(preparedBox);
+      if (!bodyResult) continue;
+      bodyResult.id = i;
+      // bodyResult.score = 0; // TBD
+      cache.push(bodyResult);
+    }
@@ -237,9 +200,8 @@ export async function predict(input: Tensor4D, config: Config): Promise<BodyResu
       }
     }
-    */
-    Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
     lastTime = now();
     skipped = 0;
   }
-  return cache ? [cache] : [];
+  return cache;
 }

src/body/blazeposedetector.ts
@@ -1,11 +1,15 @@
 import * as tf from 'dist/tfjs.esm.js';
-import type { Tensor } from '../tfjs/types';
+import { log } from '../util/util';
+import { env } from '../util/env';
+import { loadModel } from '../tfjs/load';
 import type { Box } from '../result';
 import type { Config } from '../config';
+import type { GraphModel, Tensor, Tensor1D, Tensor2D } from '../tfjs/types';

-interface DetectedBox { box: Box, boxRaw: Box, score: number }
+export interface DetectedBox { box: Box, boxRaw: Box, score: number }

-const inputSize = 224;
+let model: GraphModel | null;
+let inputSize = 224;
 let anchorTensor: { x, y };
 const numLayers = 5;
 const strides = [8, 16, 32, 32, 32];
@@ -35,8 +39,19 @@ export function createAnchors() {
   anchorTensor = { x: tf.tensor1d(anchors.map((a) => a.x)), y: tf.tensor1d(anchors.map((a) => a.y)) };
 }

+export async function loadDetector(config: Config): Promise<GraphModel> {
+  if (env.initial) model = null;
+  if (!model && config.body['detector'] && config.body['detector'].modelPath || '') {
+    model = await loadModel(config.body['detector'].modelPath);
+    const inputs = model?.['executor'] ? Object.values(model.modelSignature['inputs']) : undefined;
+    inputSize = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[1].size) : 0;
+  } else if (config.debug && model) log('cached model:', model['modelUrl']);
+  createAnchors();
+  return model as GraphModel;
+}
+
 const cropFactor = [5.0, 5.0];
-function decodeBoxes(boxesTensor, anchor): Tensor {
+export function decodeBoxes(boxesTensor, anchor) {
   return tf.tidy(() => {
     const split = tf.split(boxesTensor, 12, 1); // first 4 are box data [x,y,w,h] and 4 are keypoints data [x,y] for total of 12
     let xCenter = tf.squeeze(split[0]);
@@ -49,39 +64,41 @@ function decodeBoxes(boxesTensor, anchor): Tensor {
     height = tf.mul(tf.div(height, inputSize), cropFactor[1]);
     const xMin = tf.sub(xCenter, tf.div(width, 2));
     const yMin = tf.sub(yCenter, tf.div(height, 2));
-    const boxes = tf.stack([xMin, yMin, width, height], 1);
+    const xMax = tf.add(xMin, width);
+    const yMax = tf.add(yMin, height);
+    const boxes = tf.stack([xMin, yMin, xMax, yMax], 1);
     return boxes;
   });
 }

-export async function decode(boxesTensor: Tensor, logitsTensor: Tensor, config: Config, outputSize: [number, number]): Promise<DetectedBox[]> {
+async function decodeResults(boxesTensor: Tensor, logitsTensor: Tensor, config: Config, outputSize: [number, number]): Promise<DetectedBox[]> {
+  const detectedBoxes: DetectedBox[] = [];
   const t: Record<string, Tensor> = {};
   t.boxes = decodeBoxes(boxesTensor, anchorTensor);
   t.scores = tf.sigmoid(logitsTensor);
-  t.argmax = tf.argMax(t.scores);
-  const i = (await t.argmax.data())[0];
+  t.nms = await tf.image.nonMaxSuppressionAsync(t.boxes as Tensor2D, t.scores as Tensor1D, 1, config.body['detector']?.minConfidence || 0.1, config.body['detector']?.iouThreshold || 0.1);
+  const nms = await t.nms.data();
   const scores = await t.scores.data();
-  const detected: { box: Box, boxRaw: Box, score: number }[] = [];
-  const minScore = config.body?.['detector']?.minConfidence || 0;
-  if (scores[i] >= minScore) {
-    const boxes = await t.boxes.array();
-    const boxRaw: Box = boxes[i];
-    const box: Box = [boxRaw[0] * outputSize[0], boxRaw[1] * outputSize[1], boxRaw[2] * outputSize[0], boxRaw[3] * outputSize[1]];
-    // console.log(box);
-    detected.push({ box, boxRaw, score: scores[i] });
-  }
-  /*
-  t.nms = await tf.image.nonMaxSuppressionAsync(t.boxes, t.scores, 1, config.body.detector?.minConfidence || 0.1, config.body.detector?.iouThreshold || 0.1);
-  const boxes = t.boxes.arraySync();
-  const scores = t.scores.dataSync();
-  const nms = t.nms.dataSync();
-  const detected: Array<DetectedBox> = [];
+  const boxes = await t.boxes.array();
   for (const i of Array.from(nms)) {
     const score = scores[i];
     const boxRaw: Box = boxes[i];
-    const box: Box = [boxRaw[0] * outputSize[0], boxRaw[0] * outputSize[1], boxRaw[3] * outputSize[0], boxRaw[2] * outputSize[1]];
-    detected.push({ box, boxRaw, score: scores[i] });
+    const box: Box = [Math.round(boxRaw[0] * outputSize[0]), Math.round(boxRaw[1] * outputSize[1]), Math.round(boxRaw[2] * outputSize[0]), Math.round(boxRaw[3] * outputSize[1])];
+    const detectedBox: DetectedBox = { score, boxRaw, box };
+    detectedBoxes.push(detectedBox);
   }
-  */
   Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
-  return detected;
+  return detectedBoxes;
 }
+
+export async function detectBoxes(input: Tensor, config: Config, outputSize: [number, number]) {
+  const t: Record<string, Tensor> = {};
+  t.res = model?.execute(input, ['Identity']) as Tensor; //
+  t.logitsRaw = tf.slice(t.res, [0, 0, 0], [1, -1, 1]);
+  t.boxesRaw = tf.slice(t.res, [0, 0, 1], [1, -1, -1]);
+  t.logits = tf.squeeze(t.logitsRaw);
+  t.boxes = tf.squeeze(t.boxesRaw);
+  const boxes = await decodeResults(t.boxes, t.logits, config, outputSize);
+  Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
+  return boxes;
+}
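The `decodeBoxes` change above switches the stacked layout from `[xMin, yMin, width, height]` to corner form `[xMin, yMin, xMax, yMax]`, which is the corner-coordinate shape that the `tf.image.nonMaxSuppressionAsync` call in `decodeResults` expects. A minimal numeric sketch of that center-to-corner conversion (values are illustrative; the real code does the same element-wise on tensors):

```ts
const xCenter = 0.50, yCenter = 0.50, width = 0.20, height = 0.40; // hypothetical normalized box
const xMin = xCenter - width / 2; // 0.40
const yMin = yCenter - height / 2; // 0.30
const xMax = xMin + width; // 0.60
const yMax = yMin + height; // 0.70
// old layout: [xMin, yMin, width, height] -> [0.40, 0.30, 0.20, 0.40]
// new layout: [xMin, yMin, xMax, yMax]    -> [0.40, 0.30, 0.60, 0.70]
```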

src/face/antispoof.ts
@@ -23,7 +23,7 @@ export async function load(config: Config): Promise<GraphModel> {
 }

 export async function predict(image: Tensor4D, config: Config, idx: number, count: number): Promise<number> {
-  if (!model || !model?.['executor']) return 0;
+  if (!model?.['executor']) return 0;
   const skipTime = (config.face.antispoof?.skipTime || 0) > (now() - lastTime);
   const skipFrame = skipped < (config.face.antispoof?.skipFrames || 0);
   if (config.skipAllowed && skipTime && skipFrame && (lastCount === count) && cached[idx]) {
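The simplified guard behaves identically because optional chaining already short-circuits on a null model, making the explicit `!model ||` test redundant. A small sketch (types and values are illustrative):

```ts
function shouldSkip(model: { executor?: object } | null): boolean {
  return !model?.['executor']; // true when model is null OR when executor is missing
}
console.log(shouldSkip(null)); // true -> early return, matching the old `!model || !model?.['executor']`
console.log(shouldSkip({ executor: {} })); // false -> proceed with prediction
```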

src/util/webcam.ts
@@ -21,6 +21,8 @@ export interface WebCamConfig {
   width: number,
   /** desired webcam height */
   height: number,
+  /** deviceId of the video device to use */
+  id?: string,
 }

 export class WebCam { // eslint-disable-line @typescript-eslint/no-extraneous-class
@@ -30,6 +32,8 @@ export class WebCam { // eslint-disable-line @typescript-eslint/no-extraneous-cl
   element: HTMLVideoElement | undefined;
   /** active webcam stream */
   stream: MediaStream | undefined;
+  /** enumerated video devices */
+  devices: MediaDeviceInfo[] = [];

   constructor() {
     this.config = {
@@ -88,6 +92,16 @@ export class WebCam { // eslint-disable-line @typescript-eslint/no-extraneous-cl
     return this.element?.videoHeight || 0;
   }

+  public enumerate = async (): Promise<MediaDeviceInfo[]> => {
+    try {
+      const devices = await navigator.mediaDevices.enumerateDevices();
+      this.devices = devices.filter((device) => device.kind === 'videoinput');
+    } catch {
+      this.devices = [];
+    }
+    return this.devices;
+  };
+
   /** start method initializes webcam stream and associates it with a dom video element */
   public start = async (webcamConfig?: Partial<WebCamConfig>): Promise<void> => {
     // set config
@@ -96,6 +110,7 @@ export class WebCam { // eslint-disable-line @typescript-eslint/no-extraneous-cl
     if (webcamConfig?.mode) this.config.mode = webcamConfig?.mode;
     if (webcamConfig?.width) this.config.width = webcamConfig?.width;
     if (webcamConfig?.height) this.config.height = webcamConfig?.height;
+    if (webcamConfig?.id) this.config.id = webcamConfig?.id;

     // use or create dom element
     if (webcamConfig?.element) {
@@ -128,6 +143,7 @@ export class WebCam { // eslint-disable-line @typescript-eslint/no-extraneous-cl
         height: { ideal: this.config.height > 0 ? this.config.height : window.innerHeight },
       },
     };
+    if (this.config.id) (requestedConstraints.video as MediaTrackConstraintSet).deviceId = this.config.id;

     // set default event listeners
     this.element.addEventListener('play', () => { if (this.config.debug) log('webcam', 'play'); });
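With the new `id` field, the constraints that `WebCam.start` hands to `navigator.mediaDevices.getUserMedia` end up looking roughly like this. A sketch: only the `deviceId` line and the `ideal` width/height shape come from the diff above; the remaining values are illustrative:

```ts
async function openStream(id: string): Promise<MediaStream> {
  const requestedConstraints: MediaStreamConstraints = {
    audio: false, // illustrative; the helper only requests video
    video: {
      width: { ideal: 1920 },
      height: { ideal: window.innerHeight },
    },
  };
  // the added line in start(): attach deviceId only when configured
  if (id) (requestedConstraints.video as MediaTrackConstraintSet).deviceId = id;
  return navigator.mediaDevices.getUserMedia(requestedConstraints);
}
```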
@@ -123,8 +123,8 @@ export async function runCompile(instance: Human) {
   const numTensorsStart = tf.engine().state.numTensors;
   const compiledModels: string[] = [];
   for (const [modelName, model] of Object.entries(instance.models).filter(([key, val]) => (key !== null && val !== null))) {
-    const shape = (model.inputs?.[0]?.shape) ? [...model.inputs[0].shape] : [1, 64, 64, 3];
-    const dtype: DataType = (model.inputs?.[0]?.dtype) ? model.inputs[0].dtype : 'float32';
+    const shape = (model?.modelSignature && model?.inputs?.[0]?.shape) ? [...model.inputs[0].shape] : [1, 64, 64, 3];
+    const dtype: DataType = (model?.modelSignature && model?.inputs?.[0]?.dtype) ? model.inputs[0].dtype : 'float32';
     for (let dim = 0; dim < shape.length; dim++) {
       if (shape[dim] === -1) shape[dim] = dim === 0 ? 1 : 64; // override batch number and any dynamic dimensions
     }
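The loop kept as context below the changed lines is what makes the guarded defaults safe: any dynamic (`-1`) dimension is replaced before the warm-up compile. A small sketch of that normalization on a hypothetical shape:

```ts
const shape = [-1, -1, -1, 3]; // hypothetical dynamic input shape taken from a model signature
for (let dim = 0; dim < shape.length; dim++) {
  if (shape[dim] === -1) shape[dim] = dim === 0 ? 1 : 64; // batch -> 1, other dynamic dims -> 64
}
console.log(shape); // [1, 64, 64, 3] -> a concrete shape usable for a dummy warm-up tensor
```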

test/build.log
@@ -1,50 +1,50 @@
-2022-11-12 12:48:06 DATA:  Build {"name":"@vladmandic/human","version":"3.0.0"}
-2022-11-12 12:48:06 INFO:  Application: {"name":"@vladmandic/human","version":"3.0.0"}
-2022-11-12 12:48:06 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
-2022-11-12 12:48:06 INFO:  Toolchain: {"build":"0.7.14","esbuild":"0.15.13","typescript":"4.8.4","typedoc":"0.23.20","eslint":"8.27.0"}
-2022-11-12 12:48:06 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
-2022-11-12 12:48:06 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
-2022-11-12 12:48:06 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":361}
-2022-11-12 12:48:06 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":569,"outputBytes":924}
-2022-11-12 12:48:06 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":79,"inputBytes":673330,"outputBytes":316862}
-2022-11-12 12:48:07 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":577,"outputBytes":928}
-2022-11-12 12:48:07 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":79,"inputBytes":673334,"outputBytes":316866}
-2022-11-12 12:48:07 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":665,"outputBytes":1876}
-2022-11-12 12:48:07 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":79,"inputBytes":674282,"outputBytes":316977}
-2022-11-12 12:48:07 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1375,"outputBytes":670}
-2022-11-12 12:48:07 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":79,"inputBytes":673076,"outputBytes":315471}
-2022-11-12 12:48:07 STATE: Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":1375,"outputBytes":1144900}
-2022-11-12 12:48:07 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":79,"inputBytes":1817306,"outputBytes":1457057}
-2022-11-12 12:48:07 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":79,"inputBytes":1817306,"outputBytes":1914726}
-2022-11-12 12:48:11 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":15}
-2022-11-12 12:48:13 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":76,"generated":true}
-2022-11-12 12:48:13 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5981,"outputBytes":2862}
-2022-11-12 12:48:13 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17166,"outputBytes":9243}
-2022-11-12 12:48:21 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":113,"errors":0,"warnings":0}
-2022-11-12 12:48:21 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
-2022-11-12 12:48:21 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs-core.d.ts","output":"types/tfjs-core.d.ts"}
-2022-11-12 12:48:21 INFO:  Done...
-2022-11-12 12:48:21 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs.d.ts","output":"types/tfjs.esm.d.ts"}
-2022-11-12 12:48:21 STATE: Copy: {"input":"src/types/tsconfig.json","output":"types/tsconfig.json"}
-2022-11-12 12:48:21 STATE: Copy: {"input":"src/types/eslint.json","output":"types/.eslintrc.json"}
-2022-11-12 12:48:21 STATE: Copy: {"input":"src/types/tfjs.esm.d.ts","output":"dist/tfjs.esm.d.ts"}
-2022-11-12 12:48:21 STATE: Filter: {"input":"types/tfjs-core.d.ts"}
-2022-11-12 12:48:22 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":195}
-2022-11-12 12:48:22 STATE: Filter: {"input":"types/human.d.ts"}
-2022-11-12 12:48:22 STATE: Write: {"output":"dist/human.esm-nobundle.d.ts"}
-2022-11-12 12:48:22 STATE: Write: {"output":"dist/human.esm.d.ts"}
-2022-11-12 12:48:22 STATE: Write: {"output":"dist/human.d.ts"}
-2022-11-12 12:48:22 STATE: Write: {"output":"dist/human.node-gpu.d.ts"}
-2022-11-12 12:48:22 STATE: Write: {"output":"dist/human.node.d.ts"}
-2022-11-12 12:48:22 STATE: Write: {"output":"dist/human.node-wasm.d.ts"}
-2022-11-12 12:48:22 INFO:  Analyze models: {"folders":8,"result":"models/models.json"}
-2022-11-12 12:48:22 STATE: Models {"folder":"./models","models":12}
-2022-11-12 12:48:22 STATE: Models {"folder":"../human-models/models","models":43}
-2022-11-12 12:48:22 STATE: Models {"folder":"../blazepose/model/","models":4}
-2022-11-12 12:48:22 STATE: Models {"folder":"../anti-spoofing/model","models":1}
-2022-11-12 12:48:22 STATE: Models {"folder":"../efficientpose/models","models":3}
-2022-11-12 12:48:22 STATE: Models {"folder":"../insightface/models","models":5}
-2022-11-12 12:48:22 STATE: Models {"folder":"../movenet/models","models":3}
-2022-11-12 12:48:22 STATE: Models {"folder":"../nanodet/models","models":4}
-2022-11-12 12:48:23 STATE: Models: {"count":58,"totalSize":386543911}
-2022-11-12 12:48:23 INFO:  Human Build complete... {"logFile":"test/build.log"}
+2022-11-16 11:18:02 DATA:  Build {"name":"@vladmandic/human","version":"3.0.0"}
+2022-11-16 11:18:02 INFO:  Application: {"name":"@vladmandic/human","version":"3.0.0"}
+2022-11-16 11:18:02 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
+2022-11-16 11:18:02 INFO:  Toolchain: {"build":"0.7.14","esbuild":"0.15.14","typescript":"4.9.3","typedoc":"0.23.21","eslint":"8.27.0"}
+2022-11-16 11:18:02 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
+2022-11-16 11:18:02 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
+2022-11-16 11:18:02 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":361}
+2022-11-16 11:18:02 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":569,"outputBytes":924}
+2022-11-16 11:18:02 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":79,"inputBytes":672671,"outputBytes":318315}
+2022-11-16 11:18:02 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":577,"outputBytes":928}
+2022-11-16 11:18:02 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":79,"inputBytes":672675,"outputBytes":318319}
+2022-11-16 11:18:02 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":665,"outputBytes":1876}
+2022-11-16 11:18:02 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":79,"inputBytes":673623,"outputBytes":318430}
+2022-11-16 11:18:02 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1375,"outputBytes":670}
+2022-11-16 11:18:02 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":79,"inputBytes":672417,"outputBytes":316894}
+2022-11-16 11:18:02 STATE: Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":1375,"outputBytes":1144900}
+2022-11-16 11:18:02 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":79,"inputBytes":1816647,"outputBytes":1458374}
+2022-11-16 11:18:02 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":79,"inputBytes":1816647,"outputBytes":1917806}
+2022-11-16 11:18:06 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":15}
+2022-11-16 11:18:09 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":76,"generated":true}
+2022-11-16 11:18:09 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6136,"outputBytes":2914}
+2022-11-16 11:18:09 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17166,"outputBytes":9243}
+2022-11-16 11:18:17 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":113,"errors":0,"warnings":0}
+2022-11-16 11:18:17 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
+2022-11-16 11:18:17 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs-core.d.ts","output":"types/tfjs-core.d.ts"}
+2022-11-16 11:18:17 INFO:  Done...
+2022-11-16 11:18:17 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs.d.ts","output":"types/tfjs.esm.d.ts"}
+2022-11-16 11:18:17 STATE: Copy: {"input":"src/types/tsconfig.json","output":"types/tsconfig.json"}
+2022-11-16 11:18:17 STATE: Copy: {"input":"src/types/eslint.json","output":"types/.eslintrc.json"}
+2022-11-16 11:18:17 STATE: Copy: {"input":"src/types/tfjs.esm.d.ts","output":"dist/tfjs.esm.d.ts"}
+2022-11-16 11:18:17 STATE: Filter: {"input":"types/tfjs-core.d.ts"}
+2022-11-16 11:18:19 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":195}
+2022-11-16 11:18:19 STATE: Filter: {"input":"types/human.d.ts"}
+2022-11-16 11:18:19 STATE: Write: {"output":"dist/human.esm-nobundle.d.ts"}
+2022-11-16 11:18:19 STATE: Write: {"output":"dist/human.esm.d.ts"}
+2022-11-16 11:18:19 STATE: Write: {"output":"dist/human.d.ts"}
+2022-11-16 11:18:19 STATE: Write: {"output":"dist/human.node-gpu.d.ts"}
+2022-11-16 11:18:19 STATE: Write: {"output":"dist/human.node.d.ts"}
+2022-11-16 11:18:19 STATE: Write: {"output":"dist/human.node-wasm.d.ts"}
+2022-11-16 11:18:19 INFO:  Analyze models: {"folders":8,"result":"models/models.json"}
+2022-11-16 11:18:19 STATE: Models {"folder":"./models","models":12}
+2022-11-16 11:18:19 STATE: Models {"folder":"../human-models/models","models":43}
+2022-11-16 11:18:19 STATE: Models {"folder":"../blazepose/model/","models":4}
+2022-11-16 11:18:19 STATE: Models {"folder":"../anti-spoofing/model","models":1}
+2022-11-16 11:18:19 STATE: Models {"folder":"../efficientpose/models","models":3}
+2022-11-16 11:18:19 STATE: Models {"folder":"../insightface/models","models":5}
+2022-11-16 11:18:19 STATE: Models {"folder":"../movenet/models","models":3}
+2022-11-16 11:18:19 STATE: Models {"folder":"../nanodet/models","models":4}
+2022-11-16 11:18:19 STATE: Models: {"count":58,"totalSize":386543911}
+2022-11-16 11:18:19 INFO:  Human Build complete... {"logFile":"test/build.log"}

test/test.log
File diff suppressed because it is too large