additional human.performance counters

pull/280/head
Vladimir Mandic 2021-10-27 09:45:38 -04:00
parent 856ca5a9a0
commit 405fd8a6c2
9 changed files with 133 additions and 62 deletions

View File

@@ -9,8 +9,12 @@
## Changelog
### **HEAD -> main** 2021/10/26 mandic00@live.com
### **2.4.2** 2021/10/27 mandic00@live.com
### **origin/main** 2021/10/27 mandic00@live.com
- switch from es2018 to es2020 for main build
- switch to custom tfjs for demos
### **release: 2.4.1** 2021/10/25 mandic00@live.com

View File

@@ -16,7 +16,7 @@
<style>
@font-face { font-family: 'Lato'; font-display: swap; font-style: normal; font-weight: 100; src: local('Lato'), url('../../assets/lato-light.woff2') }
html { font-family: 'Lato', 'Segoe UI'; font-size: 16px; font-variant: small-caps; }
body { margin: 0; background: black; color: white; overflow-x: hidden; width: 100vw; height: 100vh; text-align: center; }
body { margin: 0; background: black; color: white; overflow-x: hidden; width: 100vw; height: 100vh; }
body::-webkit-scrollbar { display: none; }
</style>
</head>
@@ -24,5 +24,7 @@
<canvas id="canvas" style="margin: 0 auto; width: 100%"></canvas>
<video id="video" playsinline style="display: none"></video>
<pre id="status" style="position: absolute; top: 20px; right: 20px; background-color: grey; padding: 8px; box-shadow: 2px 2px black"></pre>
<pre id="log" style="padding: 8px"></pre>
<div id="performance" style="position: absolute; bottom: 0; width: 100%; padding: 8px; font-size: 0.8rem;"></div>
</body>
</html>

View File

@@ -8,70 +8,95 @@
import Human from "../../dist/human.custom.esm.js";
var config = {
modelBasePath: "../../models",
backend: "humangl"
backend: "humangl",
async: true
};
var human = new Human(config);
var result;
var video = document.getElementById("video");
var canvas = document.getElementById("canvas");
var fps = { detect: 0, draw: 0, element: document.getElementById("status") };
var log = (...msg) => console.log(...msg);
var dom = {
video: document.getElementById("video"),
canvas: document.getElementById("canvas"),
log: document.getElementById("log"),
fps: document.getElementById("status"),
perf: document.getElementById("performance")
};
var fps = { detect: 0, draw: 0 };
var log = (...msg) => {
dom.log.innerText += msg.join(" ") + "\n";
console.log(...msg);
};
var status = (msg) => {
if (fps.element)
fps.element.innerText = msg;
dom.fps.innerText = msg;
};
var perf = (msg) => {
dom.perf.innerText = "performance: " + JSON.stringify(msg).replace(/"|{|}/g, "").replace(/,/g, " | ");
};
async function webCam() {
status("starting webcam...");
const options = { audio: false, video: { facingMode: "user", resizeMode: "none", width: { ideal: document.body.clientWidth } } };
const stream = await navigator.mediaDevices.getUserMedia(options);
const ready = new Promise((resolve) => {
video.onloadeddata = () => resolve(true);
dom.video.onloadeddata = () => resolve(true);
});
video.srcObject = stream;
video.play();
dom.video.srcObject = stream;
dom.video.play();
await ready;
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
dom.canvas.width = dom.video.videoWidth;
dom.canvas.height = dom.video.videoHeight;
const track = stream.getVideoTracks()[0];
const capabilities = track.getCapabilities();
const settings = track.getSettings();
const constraints = track.getConstraints();
log("video:", video.videoWidth, video.videoHeight, { stream, track, settings, constraints, capabilities });
canvas.onclick = () => {
if (video.paused)
video.play();
log("video:", dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });
dom.canvas.onclick = () => {
if (dom.video.paused)
dom.video.play();
else
video.pause();
dom.video.pause();
};
}
async function detectionLoop() {
const t0 = human.now();
if (!video.paused)
result = await human.detect(video);
if (!dom.video.paused) {
result = await human.detect(dom.video);
}
const t1 = human.now();
fps.detect = 1e3 / (t1 - t0);
requestAnimationFrame(detectionLoop);
}
async function drawLoop() {
const t0 = human.now();
if (!video.paused) {
if (!dom.video.paused) {
const interpolated = await human.next(result);
await human.draw.canvas(video, canvas);
await human.draw.all(canvas, interpolated);
await human.draw.canvas(dom.video, dom.canvas);
await human.draw.all(dom.canvas, interpolated);
perf(interpolated.performance);
}
const t1 = human.now();
fps.draw = 1e3 / (t1 - t0);
status(video.paused ? "paused" : `fps: ${fps.detect.toFixed(1).padStart(5, " ")} detect / ${fps.draw.toFixed(1).padStart(5, " ")} draw`);
status(dom.video.paused ? "paused" : `fps: ${fps.detect.toFixed(1).padStart(5, " ")} detect / ${fps.draw.toFixed(1).padStart(5, " ")} draw`);
requestAnimationFrame(drawLoop);
}
async function main() {
log("human version:", human.version, "tfjs:", human.tf.version_core);
log("platform:", human.env.platform, "agent:", human.env.agent);
human.env.perfadd = true;
status("loading...");
await human.load();
status("initializing...");
log("backend:", human.tf.getBackend(), "available:", human.env.backends);
await human.warmup();
await webCam();
await detectionLoop();
await drawLoop();
}
window.onload = main;
/**
* Human demo for browsers
* @default Human Library
* @summary <https://github.com/vladmandic/human>
* @author <https://github.com/vladmandic>
* @copyright <https://github.com/vladmandic>
* @license MIT
*/
//# sourceMappingURL=index.js.map

View File

@@ -1,6 +1,10 @@
/**
* Human demo for browsers
* @description Simple Human demo for browsers using WebCam
* @default Human Library
* @summary <https://github.com/vladmandic/human>
* @author <https://github.com/vladmandic>
* @copyright <https://github.com/vladmandic>
* @license MIT
*/
import Human from '../../dist/human.custom.esm.js'; // equivalent of @vladmandic/human
@@ -8,43 +12,60 @@ import Human from '../../dist/human.custom.esm.js'; // equivalent of @vladmandic
const config = {
modelBasePath: '../../models',
backend: 'humangl',
async: true,
};
const human = new Human(config);
let result;
const video = document.getElementById('video') as HTMLVideoElement;
const canvas = document.getElementById('canvas') as HTMLCanvasElement;
const fps = { detect: 0, draw: 0, element: document.getElementById('status') };
const dom = {
video: document.getElementById('video') as HTMLVideoElement,
canvas: document.getElementById('canvas') as HTMLCanvasElement,
log: document.getElementById('log') as HTMLPreElement,
fps: document.getElementById('status') as HTMLPreElement,
perf: document.getElementById('performance') as HTMLDivElement,
};
// eslint-disable-next-line no-console
const log = (...msg) => console.log(...msg);
const status = (msg) => { if (fps.element) fps.element.innerText = msg; };
const fps = { detect: 0, draw: 0 };
const log = (...msg) => {
dom.log.innerText += msg.join(' ') + '\n';
// eslint-disable-next-line no-console
console.log(...msg);
};
const status = (msg) => {
dom.fps.innerText = msg;
};
const perf = (msg) => {
dom.perf.innerText = 'performance: ' + JSON.stringify(msg).replace(/"|{|}/g, '').replace(/,/g, ' | ');
};
async function webCam() {
status('starting webcam...');
const options = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } };
const stream: MediaStream = await navigator.mediaDevices.getUserMedia(options);
const ready = new Promise((resolve) => { video.onloadeddata = () => resolve(true); });
video.srcObject = stream;
video.play();
const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });
dom.video.srcObject = stream;
dom.video.play();
await ready;
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
dom.canvas.width = dom.video.videoWidth;
dom.canvas.height = dom.video.videoHeight;
const track: MediaStreamTrack = stream.getVideoTracks()[0];
const capabilities: MediaTrackCapabilities = track.getCapabilities();
const settings: MediaTrackSettings = track.getSettings();
const constraints: MediaTrackConstraints = track.getConstraints();
log('video:', video.videoWidth, video.videoHeight, { stream, track, settings, constraints, capabilities });
canvas.onclick = () => {
if (video.paused) video.play();
else video.pause();
log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });
dom.canvas.onclick = () => {
if (dom.video.paused) dom.video.play();
else dom.video.pause();
};
}
async function detectionLoop() {
const t0 = human.now();
if (!video.paused) result = await human.detect(video);
if (!dom.video.paused) {
result = await human.detect(dom.video);
}
const t1 = human.now();
fps.detect = 1000 / (t1 - t0);
requestAnimationFrame(detectionLoop);
@@ -52,21 +73,26 @@ async function detectionLoop() {
async function drawLoop() {
const t0 = human.now();
if (!video.paused) {
if (!dom.video.paused) {
const interpolated = await human.next(result);
await human.draw.canvas(video, canvas);
await human.draw.all(canvas, interpolated);
await human.draw.canvas(dom.video, dom.canvas);
await human.draw.all(dom.canvas, interpolated);
perf(interpolated.performance);
}
const t1 = human.now();
fps.draw = 1000 / (t1 - t0);
status(video.paused ? 'paused' : `fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect / ${fps.draw.toFixed(1).padStart(5, ' ')} draw`);
status(dom.video.paused ? 'paused' : `fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect / ${fps.draw.toFixed(1).padStart(5, ' ')} draw`);
requestAnimationFrame(drawLoop);
}
async function main() {
log('human version:', human.version, 'tfjs:', human.tf.version_core);
log('platform:', human.env.platform, 'agent:', human.env.agent);
human.env.perfadd = true;
status('loading...');
await human.load();
status('initializing...');
log('backend:', human.tf.getBackend(), 'available:', human.env.backends);
await human.warmup();
await webCam();
await detectionLoop();

View File

@@ -92,7 +92,7 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
parent.state = 'run:description';
timeStamp = now();
descRes = parent.config.face.description.enabled ? await faceres.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;
parent.performance.embedding = env.perfadd ? (parent.performance.embedding || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
parent.performance.description = env.perfadd ? (parent.performance.description || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
}
parent.analyze('End Description:');

View File

@@ -1,6 +1,10 @@
/**
* Human main module
* @default Human Library
* @summary <https://github.com/vladmandic/human>
* @author <https://github.com/vladmandic>
* @copyright <https://github.com/vladmandic>
* @license MIT
*/
// module imports
@@ -150,7 +154,7 @@ export class Human {
this.#numTensors = 0;
this.#analyzeMemoryLeaks = false;
this.#checkSanity = false;
this.performance = { backend: 0, load: 0, image: 0, frames: 0, cached: 0, changed: 0, total: 0, draw: 0 };
this.performance = {};
this.events = (typeof EventTarget !== 'undefined') ? new EventTarget() : undefined;
// object that contains all initialized models
this.models = new models.Models();
@@ -310,7 +314,7 @@
}
const current = Math.trunc(now() - timeStamp);
if (current > (this.performance.load as number || 0)) this.performance.load = this.env.perfadd ? (this.performance.load || 0) + current : current;
if (current > (this.performance.loadModels as number || 0)) this.performance.loadModels = this.env.perfadd ? (this.performance.loadModels || 0) + current : current;
}
// emit event
@@ -335,8 +339,12 @@
* @param userConfig?: {@link Config}
* @returns result: {@link Result}
*/
async warmup(userConfig?: Partial<Config>): Promise<Result | { error }> {
return warmups.warmup(this, userConfig) as Promise<Result | { error }>;
async warmup(userConfig?: Partial<Config>) {
const t0 = now();
const res = await warmups.warmup(this, userConfig);
const t1 = now();
this.performance.warmup = Math.trunc(t1 - t0);
return res;
}
/** Main detection method
@@ -379,7 +387,7 @@
this.state = 'image';
const img = image.process(input, this.config) as { canvas: HTMLCanvasElement | OffscreenCanvas, tensor: Tensor };
this.process = img;
this.performance.image = this.env.perfadd ? (this.performance.image || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.performance.inputProcess = this.env.perfadd ? (this.performance.inputProcess || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze('Get Image:');
if (!img.tensor) {
@@ -391,11 +399,11 @@
timeStamp = now();
this.config.skipAllowed = await image.skip(this.config, img.tensor);
if (!this.performance.frames) this.performance.frames = 0;
if (!this.performance.cached) this.performance.cached = 0;
(this.performance.frames as number)++;
if (this.config.skipAllowed) this.performance.cached++;
this.performance.changed = this.env.perfadd ? (this.performance.changed || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
if (!this.performance.totalFrames) this.performance.totalFrames = 0;
if (!this.performance.cachedFrames) this.performance.cachedFrames = 0;
(this.performance.totalFrames as number)++;
if (this.config.skipAllowed) this.performance.cachedFrames++;
this.performance.inputCheck = this.env.perfadd ? (this.performance.inputCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
this.analyze('Check Changed:');
// prepare where to store model results
@@ -454,7 +462,7 @@
}
this.analyze('End Hand:');
// run nanodet
// run object detection
this.analyze('Start Object:');
this.state = 'detect:object';
if (this.config.async) {
@@ -483,7 +491,7 @@
else if (this.performance.gesture) delete this.performance.gesture;
}
this.performance.total = Math.trunc(now() - timeStart);
this.performance.total = this.env.perfadd ? (this.performance.total || 0) + Math.trunc(now() - timeStart) : Math.trunc(now() - timeStart);
const shape = this.process?.tensor?.shape || [];
this.result = {
face: faceRes as FaceResult[],

View File

@@ -99,7 +99,7 @@ export async function check(instance, force = false) {
// wait for ready
tf.enableProdMode();
await tf.ready();
instance.performance.backend = Math.trunc(now() - timeStamp);
instance.performance.initBackend = Math.trunc(now() - timeStamp);
instance.config.backend = tf.getBackend();
env.updateBackend(); // update env on backend init

View File

@@ -68,6 +68,8 @@ export const options: DrawOptions = {
useCurves: <boolean>false,
};
let drawTime = 0;
const getCanvasContext = (input) => {
if (input && input.getContext) return input.getContext('2d');
throw new Error('invalid canvas');
@@ -499,6 +501,7 @@ export async function all(inCanvas: AnyCanvas, result: Result, drawOptions?: Par
gesture(inCanvas, result.gesture, localOptions), // gestures do not have buffering
// person(inCanvas, result.persons, localOptions); // already included above
]);
result.performance.draw = env.perfadd ? (result.performance.draw as number || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
drawTime = env.perfadd ? drawTime + Math.round(now() - timeStamp) : Math.round(now() - timeStamp);
result.performance.draw = drawTime;
return promise;
}

View File

@@ -9,8 +9,10 @@ import * as moveNetCoords from '../body/movenetcoords';
import * as blazePoseCoords from '../body/blazeposecoords';
import * as efficientPoseCoords from '../body/efficientposecoords';
import { now } from './util';
import { env } from './env';
const bufferedResult: Result = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
let interpolateTime = 0;
export function calc(newResult: Result, config: Config): Result {
const t0 = now();
@@ -163,7 +165,8 @@ export function calc(newResult: Result, config: Config): Result {
// append interpolation performance data
const t1 = now();
if (newResult.performance) bufferedResult.performance = { ...newResult.performance, interpolate: Math.round(t1 - t0) };
interpolateTime = env.perfadd ? interpolateTime + Math.round(t1 - t0) : Math.round(t1 - t0);
if (newResult.performance) bufferedResult.performance = { ...newResult.performance, interpolate: interpolateTime };
return bufferedResult;
}