mirror of https://github.com/vladmandic/human
webgpu and wasm optimizations
parent 62376a5ca2
commit 7ec9dfe130
@@ -9,9 +9,13 @@

## Changelog

### **HEAD -> main** 2022/05/24 mandic00@live.com
### **HEAD -> main** 2022/05/30 mandic00@live.com

### **origin/main** 2022/05/29 mandic00@live.com

- full rebuild

### **2.7.4** 2022/05/24 mandic00@live.com

TODO.md | 60

@@ -1,38 +1,64 @@
# To-Do list for Human library

## Work in Progress
## Work-in-Progress

### Exploring
N/A

- Optical flow: <https://docs.opencv.org/3.3.1/db/d7f/tutorial_js_lucas_kanade.html>
- Advanced histogram equalization: Adaptive, Contrast Limited, CLAHE
- TFLite models: <https://js.tensorflow.org/api_tflite/0.0.1-alpha.4/>
- Body segmentation: `robust-video-matting`

<hr><br>
<br><hr><br>

## Exploring

- Optical flow for intelligent temporal interpolation
  <https://docs.opencv.org/3.3.1/db/d7f/tutorial_js_lucas_kanade.html>
- Advanced histogram equalization for optimization of badly lit scenes
  **Adaptive**, **Contrast Limited**, **CLAHE**
- TFLite models
  <https://js.tensorflow.org/api_tflite/0.0.1-alpha.4/>
- Body segmentation with temporal analysis
  <https://github.com/PeterL1n/RobustVideoMatting>

<hr><br>

## Known Issues

### Face with Attention

`FaceMesh-Landmarks` model is supported only with `CPU` and `WebGL` backends due to `TFJS` issues

#### WebGPU
### WASM

Experimental support only until support is officially added in Chromium
Support for **WASM SIMD** and **WASM MultiThreading** is still disabled by default in **Chromium** based browsers
Suggestion is to enable it manually for major performance boost
Enable via <chrome://flags/#enable-experimental-webassembly-features>
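
For context, a minimal sketch of running Human on the `wasm` backend once the flag above is enabled; importing from the `@vladmandic/human` package name and the exact `@3.18.0` version tag in `wasmPath` are illustrative assumptions, not part of this commit:

```ts
// sketch only: selecting the wasm backend; the wasmPath version tag must match the bundled tfjs version
import { Human } from '@vladmandic/human';

const human = new Human({
  backend: 'wasm',
  wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.18.0/dist/', // illustrative version
  modelBasePath: 'https://vladmandic.github.io/human/models/',
});

async function run(input: HTMLVideoElement) {
  await human.load();                       // with SIMD and multithreading enabled, wasm inference is noticeably faster
  const result = await human.detect(input); // input is any supported image/video/canvas source
  console.log(result.performance);
}
```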

### WebGPU

Experimental support only until support is officially added in Chromium
Enable via <chrome://flags/#enable-unsafe-webgpu>

### Firefox

Running in **web workers** requires `OffscreenCanvas` which is still disabled by default in **Firefox**
Enable via `about:config` -> `gfx.offscreencanvas.enabled`
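
A small feature check along those lines, before opting a pipeline into web workers; `useWorkerPipeline` is a hypothetical application flag, not a Human option:

```ts
// sketch: feature-detect OffscreenCanvas (in Firefox this requires gfx.offscreencanvas.enabled as noted above)
const offscreenSupported = typeof OffscreenCanvas !== 'undefined' && typeof Worker !== 'undefined';
const useWorkerPipeline = offscreenSupported; // hypothetical app-level flag
const target: OffscreenCanvas | HTMLCanvasElement = offscreenSupported
  ? new OffscreenCanvas(640, 480)
  : document.createElement('canvas');
console.log('worker pipeline:', useWorkerPipeline, target);
```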

### Face Detection

Enhanced rotation correction for face detection is not working in NodeJS due to missing kernel op in TFJS
Feature is automatically disabled in NodeJS without user impact
Enhanced rotation correction for face detection is not working in **NodeJS** due to missing kernel op in **TFJS**
Feature is automatically disabled in **NodeJS** without user impact

- Backend NodeJS missing kernel op `RotateWithOffset`
  <https://github.com/tensorflow/tfjs/issues/5473>

<hr><br>

<br><hr><br>

## Pending Release Notes
## Pending Release Changes

- Updated **FaceMesh-Landmarks** models
- Added **FaceMesh-with-Attention** model, disabled by default, enable using `config.face.mesh.attention = true` (see the configuration sketch after this list)
- If **FaceMesh-with-Attention** model is anbled, Iris model gets disabled
- If **FaceMesh-with-Attention** model is enabled, Iris model gets disabled
  as its functionality is superseded by attention model
- Results include more detailed face mesh annotations: `result.face[].annotations`
- Update benchmarks: `demo/benchmark`
- Slight performance improvement with canvas reads using `WASM` backend
- Default to RTM version of `WASM` binaries
- Enhanced `WebGPU` compatibility
- Support for **NodeJS v18**
- Updated **Known Issues**

See <https://github.com/vladmandic/human/blob/main/TODO.md>
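
As referenced in the list above, a minimal configuration sketch for the new attention model; note that the demo configuration in this commit addresses the option as `face.attention.enabled`, while the release note writes it as `config.face.mesh.attention`:

```ts
// sketch: enabling the FaceMesh-with-Attention model (disabled by default)
import { Human } from '@vladmandic/human';

const human = new Human({
  face: {
    enabled: true,
    mesh: { enabled: true },
    attention: { enabled: true }, // per the release note; the separate iris model is then superseded
    iris: { enabled: false },     // redundant once attention is enabled
  },
});

async function analyze(input: HTMLImageElement) {
  const result = await human.detect(input);
  console.log(result.face[0]?.annotations); // more detailed face mesh annotations
}
```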

@@ -4,8 +4,8 @@
author: <https://github.com/vladmandic>'
*/

import{Human as p}from"../../dist/human.esm.js";var w={async:!0,modelBasePath:"../../models",filter:{enabled:!0,equalization:!1},cacheSensitivity:0,face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},gesture:{enabled:!0}},t=new p(w);t.env.perfadd=!1;t.draw.options.font='small-caps 18px "Lato"';t.draw.options.lineHeight=20;var e={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},o={detect:0,draw:0,tensors:0},d={detect:0,draw:0},s=(...a)=>{e.log.innerText+=a.join(" ")+`
`,console.log(...a)},r=a=>e.fps.innerText=a,b=a=>e.perf.innerText="tensors:"+t.tf.memory().numTensors+" | performance: "+JSON.stringify(a).replace(/"|{|}/g,"").replace(/,/g," | ");async function h(){r("starting webcam...");let a={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth},height:{ideal:document.body.clientHeight}}},n=await navigator.mediaDevices.getUserMedia(a),m=new Promise(f=>{e.video.onloadeddata=()=>f(!0)});e.video.srcObject=n,e.video.play(),await m,e.canvas.width=e.video.videoWidth,e.canvas.height=e.video.videoHeight;let i=n.getVideoTracks()[0],g=i.getCapabilities?i.getCapabilities():"",u=i.getSettings?i.getSettings():"",v=i.getConstraints?i.getConstraints():"";s("video:",e.video.videoWidth,e.video.videoHeight,i.label,{stream:n,track:i,settings:u,constraints:v,capabilities:g}),e.canvas.onclick=()=>{e.video.paused?e.video.play():e.video.pause()}}async function c(){if(!e.video.paused){await t.detect(e.video);let n=t.tf.memory().numTensors;n-o.tensors!==0&&s("allocated tensors:",n-o.tensors),o.tensors=n}let a=t.now();d.detect=1e3/(a-o.detect),o.detect=a,requestAnimationFrame(c)}async function l(){if(!e.video.paused){let n=await t.next(t.result);await t.draw.canvas(e.video,e.canvas),await t.draw.all(e.canvas,n),b(n.performance)}let a=t.now();d.draw=1e3/(a-o.draw),o.draw=a,r(e.video.paused?"paused":`fps: ${d.detect.toFixed(1).padStart(5," ")} detect | ${d.draw.toFixed(1).padStart(5," ")} draw`),setTimeout(l,30)}async function y(){s("human version:",t.version,"| tfjs version:",t.tf.version["tfjs-core"]),s("platform:",t.env.platform,"| agent:",t.env.agent),r("loading..."),await t.load(),s("backend:",t.tf.getBackend(),"| available:",t.env.backends),s("loaded models:",Object.values(t.models).filter(a=>a!==null).length),r("initializing..."),await t.warmup(),await h(),await c(),await l()}window.onload=y;
import{Human as p}from"../../dist/human.esm.js";var w={async:!0,modelBasePath:"../../models",filter:{enabled:!0,equalization:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},gesture:{enabled:!0}},t=new p(w);t.env.perfadd=!1;t.draw.options.font='small-caps 18px "Lato"';t.draw.options.lineHeight=20;var e={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},i={detect:0,draw:0,tensors:0},d={detect:0,draw:0},s=(...a)=>{e.log.innerText+=a.join(" ")+`
`,console.log(...a)},r=a=>e.fps.innerText=a,b=a=>e.perf.innerText="tensors:"+t.tf.memory().numTensors+" | performance: "+JSON.stringify(a).replace(/"|{|}/g,"").replace(/,/g," | ");async function h(){r("starting webcam...");let a={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth},height:{ideal:document.body.clientHeight}}},n=await navigator.mediaDevices.getUserMedia(a),m=new Promise(f=>{e.video.onloadeddata=()=>f(!0)});e.video.srcObject=n,e.video.play(),await m,e.canvas.width=e.video.videoWidth,e.canvas.height=e.video.videoHeight;let o=n.getVideoTracks()[0],g=o.getCapabilities?o.getCapabilities():"",u=o.getSettings?o.getSettings():"",v=o.getConstraints?o.getConstraints():"";s("video:",e.video.videoWidth,e.video.videoHeight,o.label,{stream:n,track:o,settings:u,constraints:v,capabilities:g}),e.canvas.onclick=()=>{e.video.paused?e.video.play():e.video.pause()}}async function c(){if(!e.video.paused){await t.detect(e.video);let n=t.tf.memory().numTensors;n-i.tensors!==0&&s("allocated tensors:",n-i.tensors),i.tensors=n}let a=t.now();d.detect=1e3/(a-i.detect),i.detect=a,requestAnimationFrame(c)}async function l(){if(!e.video.paused){let n=await t.next(t.result);await t.draw.canvas(e.video,e.canvas),await t.draw.all(e.canvas,n),b(n.performance)}let a=t.now();d.draw=1e3/(a-i.draw),i.draw=a,r(e.video.paused?"paused":`fps: ${d.detect.toFixed(1).padStart(5," ")} detect | ${d.draw.toFixed(1).padStart(5," ")} draw`),setTimeout(l,30)}async function y(){s("human version:",t.version,"| tfjs version:",t.tf.version["tfjs-core"]),s("platform:",t.env.platform,"| agent:",t.env.agent),r("loading..."),await t.load(),s("backend:",t.tf.getBackend(),"| available:",t.env.backends),s("loaded models:",Object.values(t.models).filter(a=>a!==null).length),r("initializing..."),await t.warmup(),await h(),await c(),await l()}window.onload=y;
/**
* Human demo for browsers
* @default Human Library

File diff suppressed because one or more lines are too long

@@ -12,11 +12,10 @@ import { Human, Config } from '../../dist/human.esm.js'; // equivalent of @vladm
const humanConfig: Partial<Config> = { // user configuration for human, used to fine-tune behavior
  // backend: 'webgpu' as const,
  // wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.18.0/dist/',
  // cacheSensitivity: 0,
  async: true,
  modelBasePath: '../../models',
  filter: { enabled: true, equalization: false },
  //
  cacheSensitivity: 0,
  face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },
  body: { enabled: true },
  hand: { enabled: true },

package.json | 10

@@ -65,10 +65,10 @@
"@tensorflow/tfjs-layers": "^3.18.0",
|
||||
"@tensorflow/tfjs-node": "^3.18.0",
|
||||
"@tensorflow/tfjs-node-gpu": "^3.18.0",
|
||||
"@types/node": "^17.0.36",
|
||||
"@types/node": "^17.0.38",
|
||||
"@types/offscreencanvas": "^2019.7.0",
|
||||
"@typescript-eslint/eslint-plugin": "^5.26.0",
|
||||
"@typescript-eslint/parser": "^5.26.0",
|
||||
"@typescript-eslint/eslint-plugin": "^5.27.0",
|
||||
"@typescript-eslint/parser": "^5.27.0",
|
||||
"@vladmandic/build": "^0.7.4",
|
||||
"@vladmandic/pilogger": "^0.4.4",
|
||||
"@vladmandic/tfjs": "github:vladmandic/tfjs",
|
||||
|

@@ -82,11 +82,11 @@
"eslint-plugin-json": "^3.1.0",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^6.0.0",
"node-fetch": "^3.2.4",
"node-fetch": "^3.2.5",
"rimraf": "^3.0.2",
"seedrandom": "^3.0.5",
"tslib": "^2.4.0",
"typedoc": "0.22.15",
"typedoc": "0.22.17",
"typescript": "4.7.2"
}
}

@@ -5,11 +5,11 @@ export async function augment(rawCoords, results: Tensor[]) {
const t: Record<string, Float32Array> = { // all attention models produce 2d results so it needs to be later augmented with correct z-coords
  // mesh: results[0], // already have it in rawCoords // output_mesh_identity
  // flag: results[1], // already processed in parent // conv_faceflag
  lips: results.filter((r) => r.size === 160)[0].dataSync() as Float32Array, // 80 x 2d = 160 // output_lips
  irisL: results.filter((r) => r.size === 10)[0].dataSync() as Float32Array, // 5 x 2d = 10 // output_right_iris
  eyeL: results.filter((r) => r.size === 142)[0].dataSync() as Float32Array, // 71 x 2d = 142 // output_right_eye
  irisR: results.filter((r) => r.size === 10)[1].dataSync() as Float32Array, // 5 x 2d = 10 // output_left_iris
  eyeR: results.filter((r) => r.size === 142)[1].dataSync() as Float32Array, // 71 x 2d = 142 // output_left_eye
  lips: await results.filter((r) => r.size === 160)[0].data() as Float32Array, // 80 x 2d = 160 // output_lips
  irisL: await results.filter((r) => r.size === 10)[0].data() as Float32Array, // 5 x 2d = 10 // output_right_iris
  eyeL: await results.filter((r) => r.size === 142)[0].data() as Float32Array, // 71 x 2d = 142 // output_right_eye
  irisR: await results.filter((r) => r.size === 10)[1].data() as Float32Array, // 5 x 2d = 10 // output_left_iris
  eyeR: await results.filter((r) => r.size === 142)[1].data() as Float32Array, // 71 x 2d = 142 // output_left_eye
};

// augment iris: adds additional 5 keypoints per eye
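
The recurring change in this and the following hunks is swapping the synchronous `dataSync()` for an awaited `data()`; a generic sketch of the pattern (the tensor here is illustrative):

```ts
import * as tf from '@tensorflow/tfjs';

async function readback() {
  const t = tf.tensor([1, 2, 3]);
  const blocking = t.dataSync();   // before: blocks the main thread until the backend downloads the values
  const values = await t.data();   // after: resolves asynchronously, friendlier to webgl/webgpu/wasm backends
  t.dispose();                     // tensors still require explicit disposal either way
  return { blocking, values };
}
```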

@@ -26,7 +26,7 @@ export class FaceBoxes {
const resizeT = tf.image.resizeBilinear(input, [this.inputSize, this.inputSize]);
const castT = resizeT.toInt();
const [scoresT, boxesT, numT] = await this.model.executeAsync(castT) as Tensor[];
const scores = scoresT.dataSync();
const scores = await scoresT.data();
const squeezeT = tf.squeeze(boxesT);
const boxes = squeezeT.arraySync();
scoresT.dispose();

@@ -88,7 +88,7 @@ export async function predict(input: Tensor, config: Config): Promise<FaceResult
const results = model.execute(face.tensor as Tensor) as Array<Tensor>;
const confidenceT = results.find((t) => t.shape[t.shape.length - 1] === 1) as Tensor;
const meshT = results.find((t) => t.shape[t.shape.length - 1] === 1404) as Tensor;
const faceConfidence = confidenceT.dataSync();
const faceConfidence = await confidenceT.data();
face.faceScore = Math.round(100 * faceConfidence[0]) / 100;
const coordsReshaped = tf.reshape(meshT, [-1, 3]);
let rawCoords = await coordsReshaped.array();

@@ -125,9 +125,13 @@ export class Human {
*/
constructor(userConfig?: Partial<Config>) {
  this.env = env;
  /*
  defaults.wasmPath = tf.version['tfjs-core'].includes('-') // custom build or official build
    ? 'https://vladmandic.github.io/tfjs/dist/'
    : `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf.version_core}/dist/`;
  */
  const tfVersion = (tf.version?.tfjs || tf.version_core).replace(/-(.*)/, '');
  defaults.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tfVersion}/dist/`;
  defaults.modelBasePath = env.browser ? '../models/' : 'file://models/';
  defaults.backend = env.browser ? 'humangl' : 'tensorflow';
  this.version = app.version; // expose version property on instance of class
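
A worked example of how that default resolves; the version string is illustrative:

```ts
// illustrative values only: deriving the default wasmPath as in the constructor above
const reported = '3.18.0-20220601';              // e.g. tf.version?.tfjs for a custom or nightly build
const tfVersion = reported.replace(/-(.*)/, ''); // strips any pre-release suffix -> '3.18.0'
const wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tfVersion}/dist/`;
// -> https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.18.0/dist/
// the value can still be overridden via the user config passed to the Human constructor
```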

@@ -58,6 +58,16 @@ export async function check(instance: Human, force = false) {
} else {
  const adapter = await navigator['gpu'].requestAdapter();
  if (instance.config.debug) log('enumerated webgpu adapter:', adapter);
  if (!adapter) {
    log('override: backend set to webgpu but browser reports no available gpu');
    instance.config.backend = 'humangl';
  } else {
    // @ts-ignore requestAdapterInfo is not in tslib
    // eslint-disable-next-line no-undef
    const adapterInfo = 'requestAdapterInfo' in adapter ? await (adapter as GPUAdapter).requestAdapterInfo() : undefined;
    // if (adapter.features) adapter.features.forEach((feature) => log('webgpu features:', feature));
    log('webgpu adapter info:', adapterInfo);
  }
}
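
For reference, a sketch of opting into this code path from user configuration; as the check above shows, Human falls back to `humangl` when the browser reports no adapter:

```ts
// sketch: requesting the experimental webgpu backend (requires chrome://flags/#enable-unsafe-webgpu)
import { Human } from '@vladmandic/human';

const human = new Human({ backend: 'webgpu' });

async function init() {
  await human.load();
  console.log('active backend:', human.tf.getBackend()); // 'webgpu', or the fallback chosen by the check above
}
```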

@@ -76,6 +86,9 @@ export async function check(instance: Human, force = false) {

// customize wasm
if (instance.config.backend === 'wasm') {
  try {
    tf.env().set('CANVAS2D_WILL_READ_FREQUENTLY', true);
  } catch { /**/ }
  if (instance.config.debug) log('wasm path:', instance.config.wasmPath);
  if (typeof tf?.setWasmPaths !== 'undefined') await tf.setWasmPaths(instance.config.wasmPath, instance.config.wasmPlatformFetch);
  else throw new Error('backend error: attempting to use wasm backend but wasm path is not set');
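
The `CANVAS2D_WILL_READ_FREQUENTLY` flag presumably maps to the Canvas2D `willReadFrequently` context hint; a sketch of that browser API as an assumption about the intent, not a description of tfjs internals:

```ts
// assumption: the tfjs flag above enables the equivalent of this hint when reading pixels for the wasm backend
const canvas = document.createElement('canvas');
const ctx = canvas.getContext('2d', { willReadFrequently: true }); // keeps the bitmap CPU-readable for frequent getImageData() calls
const pixels = ctx?.getImageData(0, 0, canvas.width, canvas.height);
console.log(pixels?.data.length);
```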

@@ -1,25 +1,24 @@
2022-05-29 21:08:33 INFO: Application: {"name":"@vladmandic/human","version":"2.8.0"}
2022-05-29 21:08:33 INFO: Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
2022-05-29 21:08:33 INFO: Toolchain: {"build":"0.7.3","esbuild":"0.14.42","typescript":"4.7.2","typedoc":"0.22.15","eslint":"8.16.0"}
2022-05-29 21:08:33 INFO: Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
2022-05-29 21:08:33 STATE: Clean: {"locations":["dist/*","types/lib/*","typedoc/*"]}
2022-05-29 21:08:33 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":595}
2022-05-29 21:08:33 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":73,"inputBytes":641913,"outputBytes":300409}
2022-05-29 21:08:33 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":599}
2022-05-29 21:08:33 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":73,"inputBytes":641917,"outputBytes":300413}
2022-05-29 21:08:33 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":651}
2022-05-29 21:08:33 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":73,"inputBytes":641969,"outputBytes":300463}
2022-05-29 21:08:33 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1069,"outputBytes":358}
2022-05-29 21:08:33 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1032,"outputBytes":583}
2022-05-29 21:08:33 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":73,"inputBytes":641901,"outputBytes":299319}
2022-05-29 21:08:34 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1354096}
2022-05-29 21:08:34 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":73,"inputBytes":1995414,"outputBytes":1652490}
2022-05-29 21:08:34 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":73,"inputBytes":1995414,"outputBytes":2138654}
2022-05-29 21:08:40 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":116}
2022-05-29 21:08:40 WARN: You are running with an unsupported TypeScript version! This may work, or it might break. TypeDoc supports 4.0, 4.1, 4.2, 4.3, 4.4, 4.5, 4.6
2022-05-29 21:08:42 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":73,"generated":true}
2022-05-29 21:08:42 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6056,"outputBytes":3008}
2022-05-29 21:08:42 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":15174,"outputBytes":7820}
2022-05-29 21:08:50 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":105,"errors":0,"warnings":0}
2022-05-29 21:08:50 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
2022-05-29 21:08:50 INFO: Done...
2022-06-02 10:37:20 INFO: Application: {"name":"@vladmandic/human","version":"2.8.0"}
2022-06-02 10:37:20 INFO: Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
2022-06-02 10:37:20 INFO: Toolchain: {"build":"0.7.3","esbuild":"0.14.42","typescript":"4.7.2","typedoc":"0.22.17","eslint":"8.16.0"}
2022-06-02 10:37:20 INFO: Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
2022-06-02 10:37:20 STATE: Clean: {"locations":["dist/*","types/lib/*","typedoc/*"]}
2022-06-02 10:37:20 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":595}
2022-06-02 10:37:20 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":73,"inputBytes":642827,"outputBytes":300685}
2022-06-02 10:37:20 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":599}
2022-06-02 10:37:20 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":73,"inputBytes":642831,"outputBytes":300689}
2022-06-02 10:37:20 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":651}
2022-06-02 10:37:20 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":73,"inputBytes":642883,"outputBytes":300739}
2022-06-02 10:37:20 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1069,"outputBytes":358}
2022-06-02 10:37:20 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1032,"outputBytes":583}
2022-06-02 10:37:20 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":73,"inputBytes":642815,"outputBytes":299595}
2022-06-02 10:37:20 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1353524}
2022-06-02 10:37:21 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":73,"inputBytes":1995756,"outputBytes":1652193}
2022-06-02 10:37:21 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":73,"inputBytes":1995756,"outputBytes":2139052}
2022-06-02 10:37:25 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":116}
2022-06-02 10:37:27 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":73,"generated":true}
2022-06-02 10:37:27 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6054,"outputBytes":2989}
2022-06-02 10:37:27 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":15174,"outputBytes":7820}
2022-06-02 10:37:35 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":106,"errors":0,"warnings":0}
2022-06-02 10:37:35 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
2022-06-02 10:37:35 INFO: Done...

@@ -12,10 +12,9 @@ Human.env.Image = Image; // requires monkey-patch as wasm does not have tf.brows
const config = {
  cacheSensitivity: 0,
  modelBasePath: 'https://vladmandic.github.io/human/models/',
  // modelBasePath: 'http://localhost:10030/models/',
  backend: 'wasm',
  wasmPath: 'node_modules/@tensorflow/tfjs-backend-wasm/dist/',
  // wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.9.0/dist/',
  // wasmPath: 'node_modules/@tensorflow/tfjs-backend-wasm/dist/',
  wasmPath: `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf.version_core}/dist/`,
  debug: false,
  async: false,
  face: {

test/test.log | 1368
File diff suppressed because it is too large