update release

pull/356/head
Vladimir Mandic 2022-10-09 14:34:58 -04:00
parent ad55453f35
commit 5218439796
13 changed files with 1067 additions and 4186 deletions


@@ -1,6 +1,6 @@
 # @vladmandic/human
-Version: **2.11.0**
+Version: **2.11.1**
 Description: **Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition**
 Author: **Vladimir Mandic <mandic00@live.com>**
@@ -9,8 +9,12 @@
 ## Changelog
-### **HEAD -> main** 2022/09/29 mandic00@live.com
+### **HEAD -> main** 2022/10/09 mandic00@live.com
+
+### **origin/main** 2022/10/02 mandic00@live.com
+
+- add human.webcam methods
 - create funding.yml
 - fix rotation interpolation
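
The "add human.webcam methods" entry refers to the new webcam helper exercised by the updated demos later in this commit. A minimal sketch of that usage, based on the demo code below (it assumes a page with <video id="video"> and <canvas id="canvas"> elements and models served from the CDN path used in the demos):

import * as H from '@vladmandic/human';

const human = new H.Human({ modelBasePath: 'https://vladmandic.github.io/human-models/models/' });
const video = document.getElementById('video');
const canvas = document.getElementById('canvas');

async function start() {
  await human.load();                                        // preload configured models
  await human.webcam.start({ element: video, crop: true });  // attach and start the webcam stream on the video element
  canvas.width = human.webcam.width;                         // size the canvas to the webcam resolution
  canvas.height = human.webcam.height;
  canvas.onclick = async () => {                             // click the canvas to toggle pause/play
    if (human.webcam.paused) await human.webcam.play();
    else human.webcam.pause();
  };
}

window.onload = start;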

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -10,8 +10,6 @@
 import * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human
 const humanConfig = { // user configuration for human, used to fine-tune behavior
-  // backend: 'wasm',
-  // wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.20.0/dist/',
   modelBasePath: 'https://vladmandic.github.io/human-models/models/',
   filter: { enabled: true, equalization: false, flip: false },
   face: { enabled: false },


@@ -4,96 +4,6 @@
 author: <https://github.com/vladmandic>'
 */
-// demo/typescript/index.ts
-import * as H from "../../dist/human.esm.js";
-var humanConfig = {
-  modelBasePath: "../../models",
-  filter: { enabled: true, equalization: false, flip: false },
-  face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },
-  body: { enabled: true },
-  hand: { enabled: true },
-  object: { enabled: false },
-  segmentation: { enabled: false },
-  gesture: { enabled: true }
-};
-var human = new H.Human(humanConfig);
-human.env.perfadd = false;
-human.draw.options.font = 'small-caps 18px "Lato"';
-human.draw.options.lineHeight = 20;
-var dom = {
-  video: document.getElementById("video"),
-  canvas: document.getElementById("canvas"),
-  log: document.getElementById("log"),
-  fps: document.getElementById("status"),
-  perf: document.getElementById("performance")
-};
-var timestamp = { detect: 0, draw: 0, tensors: 0, start: 0 };
-var fps = { detectFPS: 0, drawFPS: 0, frames: 0, averageMs: 0 };
-var log = (...msg) => {
-  dom.log.innerText += msg.join(" ") + "\n";
-  console.log(...msg);
-};
-var status = (msg) => dom.fps.innerText = msg;
-var perf = (msg) => dom.perf.innerText = "tensors:" + human.tf.memory().numTensors.toString() + " | performance: " + JSON.stringify(msg).replace(/"|{|}/g, "").replace(/,/g, " | ");
-async function detectionLoop() {
-  if (!dom.video.paused) {
-    if (timestamp.start === 0)
-      timestamp.start = human.now();
-    await human.detect(dom.video);
-    const tensors = human.tf.memory().numTensors;
-    if (tensors - timestamp.tensors !== 0)
-      log("allocated tensors:", tensors - timestamp.tensors);
-    timestamp.tensors = tensors;
-    fps.detectFPS = Math.round(1e3 * 1e3 / (human.now() - timestamp.detect)) / 1e3;
-    fps.frames++;
-    fps.averageMs = Math.round(1e3 * (human.now() - timestamp.start) / fps.frames) / 1e3;
-    if (fps.frames % 100 === 0 && !dom.video.paused)
-      log("performance", { ...fps, tensors: timestamp.tensors });
-  }
-  timestamp.detect = human.now();
-  requestAnimationFrame(detectionLoop);
-}
-async function drawLoop() {
-  if (!dom.video.paused) {
-    const interpolated = human.next(human.result);
-    if (human.config.filter.flip)
-      human.draw.canvas(interpolated.canvas, dom.canvas);
-    else
-      human.draw.canvas(dom.video, dom.canvas);
-    await human.draw.all(dom.canvas, interpolated);
-    perf(interpolated.performance);
-  }
-  const now = human.now();
-  fps.drawFPS = Math.round(1e3 * 1e3 / (now - timestamp.draw)) / 1e3;
-  timestamp.draw = now;
-  status(dom.video.paused ? "paused" : `fps: ${fps.detectFPS.toFixed(1).padStart(5, " ")} detect | ${fps.drawFPS.toFixed(1).padStart(5, " ")} draw`);
-  setTimeout(drawLoop, 30);
-}
-async function webCam() {
-  await human.webcam.start({ element: dom.video, crop: true });
-  dom.canvas.width = human.webcam.width;
-  dom.canvas.height = human.webcam.height;
-  dom.canvas.onclick = async () => {
-    if (human.webcam.paused)
-      await human.webcam.play();
-    else
-      human.webcam.pause();
-  };
-}
-async function main() {
-  log("human version:", human.version, "| tfjs version:", human.tf.version["tfjs-core"]);
-  log("platform:", human.env.platform, "| agent:", human.env.agent);
-  status("loading...");
-  await human.load();
-  log("backend:", human.tf.getBackend(), "| available:", human.env.backends);
-  log("models stats:", human.getModelStats());
-  log("models loaded:", Object.values(human.models).filter((model) => model !== null).length);
-  status("initializing...");
-  await human.warmup();
-  await webCam();
-  await detectionLoop();
-  await drawLoop();
-}
-window.onload = main;
+import*as i from"../../dist/human.esm.js";var m={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},segmentation:{enabled:!1},gesture:{enabled:!0}},e=new i.Human(m);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var a={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},s={detectFPS:0,drawFPS:0,frames:0,averageMs:0},o=(...t)=>{a.log.innerText+=t.join(" ")+`
+`,console.log(...t)},d=t=>a.fps.innerText=t,f=t=>a.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(t).replace(/"|{|}/g,"").replace(/,/g," | ");async function l(){if(!a.video.paused){n.start===0&&(n.start=e.now()),await e.detect(a.video);let t=e.tf.memory().numTensors;t-n.tensors!==0&&o("allocated tensors:",t-n.tensors),n.tensors=t,s.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,s.frames++,s.averageMs=Math.round(1e3*(e.now()-n.start)/s.frames)/1e3,s.frames%100===0&&!a.video.paused&&o("performance",{...s,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(l)}async function c(){if(!a.video.paused){let r=e.next(e.result);e.config.filter.flip?e.draw.canvas(r.canvas,a.canvas):e.draw.canvas(a.video,a.canvas),await e.draw.all(a.canvas,r),f(r.performance)}let t=e.now();s.drawFPS=Math.round(1e3*1e3/(t-n.draw))/1e3,n.draw=t,d(a.video.paused?"paused":`fps: ${s.detectFPS.toFixed(1).padStart(5," ")} detect | ${s.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(c,30)}async function u(){await e.webcam.start({element:a.video,crop:!0}),a.canvas.width=e.webcam.width,a.canvas.height=e.webcam.height,a.canvas.onclick=async()=>{e.webcam.paused?await e.webcam.play():e.webcam.pause()}}async function w(){o("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),o("platform:",e.env.platform,"| agent:",e.env.agent),d("loading..."),await e.load(),o("backend:",e.tf.getBackend(),"| available:",e.env.backends),o("models stats:",e.getModelStats()),o("models loaded:",Object.values(e.models).filter(t=>t!==null).length),d("initializing..."),await e.warmup(),await u(),await l(),await c()}window.onload=w;
 //# sourceMappingURL=index.js.map

File diff suppressed because one or more lines are too long


@@ -10,10 +10,6 @@
 import * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human
 const humanConfig: Partial<H.Config> = { // user configuration for human, used to fine-tune behavior
-  // backend: 'wasm',
-  // wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.20.0/dist/',
-  // cacheSensitivity: 0,
-  // async: false,
   modelBasePath: '../../models',
   filter: { enabled: true, equalization: false, flip: false },
   face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },
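
The options removed above were commented-out hints for switching the demo to the wasm backend. A sketch of what that configuration would look like if enabled, reusing the values from the removed comments:

const humanConfig = {
  backend: 'wasm',            // use the tfjs wasm backend instead of the default backend
  wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.20.0/dist/', // location the wasm binaries are fetched from
  modelBasePath: '../../models',
  filter: { enabled: true, equalization: false, flip: false },
};
const human = new H.Human(humanConfig);  // H imported from '../../dist/human.esm.js' as in the demo above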


@@ -11,7 +11,6 @@
   "mb3-centernet": 4030290,
   "models": 0,
   "movenet-lightning": 4650216,
-  "selfie": 212886,
   "age": 161240,
   "blazeface-back": 538928,
   "blazeface-front": 402048,
@@ -42,6 +41,8 @@
   "movenet-thunder": 12477112,
   "nanodet": 7574558,
   "posenet": 5032780,
+  "rvm": 3739355,
+  "selfie": 212886,
   "blazepose-detect": 5928804,
   "anti-spoofing": 853098,
   "efficientpose-i-lite": 2269064,

File diff suppressed because it is too large


@@ -3,7 +3,7 @@ const tf = require('@tensorflow/tfjs'); // wasm backend requires tfjs to be load
 const wasm = require('@tensorflow/tfjs-backend-wasm'); // wasm backend does not get auto-loaded in nodejs
 const { Canvas, Image } = require('canvas'); // eslint-disable-line node/no-extraneous-require, node/no-missing-require
 const H = require('../dist/human.node-wasm.js');
-const test = require('./test-node-main.js').test;
+const test = require('./test-node-main.js');
 H.env.Canvas = Canvas; // requires monkey-patch as wasm does not have tf.browser namespace
 H.env.Image = Image; // requires monkey-patch as wasm does not have tf.browser namespace
@@ -35,12 +35,16 @@ const config = {
 };
 async function main() {
-  wasm.setWasmPaths(config.wasmPath);
-  await tf.setBackend('wasm');
+  wasm.setWasmPaths(config.wasmPath, true);
+  const ok = await tf.setBackend('wasm');
+  if (!ok) {
+    test.log('error', 'failed: setwasmpath', config.wasmPath);
+    return;
+  }
   await tf.ready();
   H.env.updateBackend();
   log.info(H.env.wasm, config.wasmPath);
-  test(H.Human, config);
+  test.test(H.Human, config);
 }
 if (require.main === module) main();
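
The reworked main() above no longer assumes the wasm backend initialized successfully; it checks the result of tf.setBackend() before running the tests. The same initialization pattern in isolation (a sketch with an illustrative initWasmBackend helper; wasmPath is whatever location serves the tfjs wasm binaries):

const tf = require('@tensorflow/tfjs');                 // tfjs core must be loaded before human
const wasm = require('@tensorflow/tfjs-backend-wasm');  // wasm backend is not auto-loaded in nodejs
const H = require('../dist/human.node-wasm.js');

async function initWasmBackend(wasmPath) {
  wasm.setWasmPaths(wasmPath, true);                    // point tfjs at the wasm binaries; second argument enables platform fetch
  const ok = await tf.setBackend('wasm');               // resolves to false if the backend could not be initialized
  if (!ok) throw new Error(`failed to set wasm backend from ${wasmPath}`);
  await tf.ready();
  H.env.updateBackend();                                // let human pick up the externally configured tf backend
}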


@@ -572,3 +572,4 @@ async function test(Human, inputConfig) {
 }
 exports.test = test;
+exports.log = log;
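
With log now exported next to test, callers such as the wasm test above can require the module once and reach both helpers. A sketch of that consumer side (the argument values are illustrative; signatures follow the usage shown above):

const test = require('./test-node-main.js');
test.log('info', 'starting test run');   // log(level, ...messages)
test.test(H.Human, config);              // test(Human, inputConfig) as defined above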

File diff suppressed because it is too large


@@ -52,7 +52,7 @@
     "tabSize": 2
   },
   "exclude": ["node_modules/", "types/", "dist/**/*.js"],
-  "include": ["src", "tfjs/*.ts", "types/human.d.ts", "test/**/*.ts", "demo/**/*.ts", "demo/segmentation/index.js", "demo/index.js"],
+  "include": ["src", "tfjs/*.ts", "types/human.d.ts", "test/**/*.ts", "demo/**/*.ts"],
   "typedocOptions": {
     "externalPattern": ["node_modules/", "tfjs/"]
   }