From 59058a0b93e6dade92828785548630ea7255134a Mon Sep 17 00:00:00 2001 From: Vladimir Mandic Date: Wed, 27 Oct 2021 09:45:38 -0400 Subject: [PATCH] additional human.performance counters --- CHANGELOG.md | 6 +++- demo/typescript/index.html | 4 ++- demo/typescript/index.js | 71 ++++++++++++++++++++++++++------------ demo/typescript/index.ts | 68 +++++++++++++++++++++++++----------- src/face/face.ts | 2 +- src/human.ts | 32 ++++++++++------- src/tfjs/backend.ts | 2 +- src/util/interpolate.ts | 5 ++- 8 files changed, 129 insertions(+), 61 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3c364063..c29d387a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,8 +9,12 @@ ## Changelog -### **HEAD -> main** 2021/10/26 mandic00@live.com +### **2.4.2** 2021/10/27 mandic00@live.com + +### **origin/main** 2021/10/27 mandic00@live.com + +- switch from es2018 to es2020 for main build - switch to custom tfjs for demos ### **release: 2.4.1** 2021/10/25 mandic00@live.com diff --git a/demo/typescript/index.html b/demo/typescript/index.html index 230a0a8f..bbea62b8 100644 --- a/demo/typescript/index.html +++ b/demo/typescript/index.html @@ -16,7 +16,7 @@ @@ -24,5 +24,7 @@

+    <pre id="log"></pre>

+    <div id="performance"></div>
diff --git a/demo/typescript/index.js b/demo/typescript/index.js index 08fc7199..89f91e07 100644 --- a/demo/typescript/index.js +++ b/demo/typescript/index.js @@ -8,70 +8,95 @@ import Human from "../../dist/human.custom.esm.js"; var config = { modelBasePath: "../../models", - backend: "humangl" + backend: "humangl", + async: true }; var human = new Human(config); var result; -var video = document.getElementById("video"); -var canvas = document.getElementById("canvas"); -var fps = { detect: 0, draw: 0, element: document.getElementById("status") }; -var log = (...msg) => console.log(...msg); +var dom = { + video: document.getElementById("video"), + canvas: document.getElementById("canvas"), + log: document.getElementById("log"), + fps: document.getElementById("status"), + perf: document.getElementById("performance") +}; +var fps = { detect: 0, draw: 0 }; +var log = (...msg) => { + dom.log.innerText += msg.join(" ") + "\n"; + console.log(...msg); +}; var status = (msg) => { - if (fps.element) - fps.element.innerText = msg; + dom.fps.innerText = msg; +}; +var perf = (msg) => { + dom.perf.innerText = "performance: " + JSON.stringify(msg).replace(/"|{|}/g, "").replace(/,/g, " | "); }; async function webCam() { status("starting webcam..."); const options = { audio: false, video: { facingMode: "user", resizeMode: "none", width: { ideal: document.body.clientWidth } } }; const stream = await navigator.mediaDevices.getUserMedia(options); const ready = new Promise((resolve) => { - video.onloadeddata = () => resolve(true); + dom.video.onloadeddata = () => resolve(true); }); - video.srcObject = stream; - video.play(); + dom.video.srcObject = stream; + dom.video.play(); await ready; - canvas.width = video.videoWidth; - canvas.height = video.videoHeight; + dom.canvas.width = dom.video.videoWidth; + dom.canvas.height = dom.video.videoHeight; const track = stream.getVideoTracks()[0]; const capabilities = track.getCapabilities(); const settings = track.getSettings(); const constraints = track.getConstraints(); - log("video:", video.videoWidth, video.videoHeight, { stream, track, settings, constraints, capabilities }); - canvas.onclick = () => { - if (video.paused) - video.play(); + log("video:", dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities }); + dom.canvas.onclick = () => { + if (dom.video.paused) + dom.video.play(); else - video.pause(); + dom.video.pause(); }; } async function detectionLoop() { const t0 = human.now(); - if (!video.paused) - result = await human.detect(video); + if (!dom.video.paused) { + result = await human.detect(dom.video); + } const t1 = human.now(); fps.detect = 1e3 / (t1 - t0); requestAnimationFrame(detectionLoop); } async function drawLoop() { const t0 = human.now(); - if (!video.paused) { + if (!dom.video.paused) { const interpolated = await human.next(result); - await human.draw.canvas(video, canvas); - await human.draw.all(canvas, interpolated); + await human.draw.canvas(dom.video, dom.canvas); + await human.draw.all(dom.canvas, interpolated); + perf(interpolated.performance); } const t1 = human.now(); fps.draw = 1e3 / (t1 - t0); - status(video.paused ? "paused" : `fps: ${fps.detect.toFixed(1).padStart(5, " ")} detect / ${fps.draw.toFixed(1).padStart(5, " ")} draw`); + status(dom.video.paused ? 
"paused" : `fps: ${fps.detect.toFixed(1).padStart(5, " ")} detect / ${fps.draw.toFixed(1).padStart(5, " ")} draw`); requestAnimationFrame(drawLoop); } async function main() { + log("human version:", human.version, "tfjs:", human.tf.version_core); + log("platform:", human.env.platform, "agent:", human.env.agent); + human.env.perfadd = true; status("loading..."); await human.load(); status("initializing..."); + log("backend:", human.tf.getBackend(), "available:", human.env.backends); await human.warmup(); await webCam(); await detectionLoop(); await drawLoop(); } window.onload = main; +/** + * Human demo for browsers + * @default Human Library + * @summary + * @author + * @copyright + * @license MIT + */ //# sourceMappingURL=index.js.map diff --git a/demo/typescript/index.ts b/demo/typescript/index.ts index 312c4e8b..d07e4f50 100644 --- a/demo/typescript/index.ts +++ b/demo/typescript/index.ts @@ -1,6 +1,10 @@ /** * Human demo for browsers - * @description Simple Human demo for browsers using WebCam + * @default Human Library + * @summary + * @author + * @copyright + * @license MIT */ import Human from '../../dist/human.custom.esm.js'; // equivalent of @vladmandic/human @@ -8,43 +12,60 @@ import Human from '../../dist/human.custom.esm.js'; // equivalent of @vladmandic const config = { modelBasePath: '../../models', backend: 'humangl', + async: true, }; const human = new Human(config); let result; -const video = document.getElementById('video') as HTMLVideoElement; -const canvas = document.getElementById('canvas') as HTMLCanvasElement; -const fps = { detect: 0, draw: 0, element: document.getElementById('status') }; +const dom = { + video: document.getElementById('video') as HTMLVideoElement, + canvas: document.getElementById('canvas') as HTMLCanvasElement, + log: document.getElementById('log') as HTMLPreElement, + fps: document.getElementById('status') as HTMLPreElement, + perf: document.getElementById('performance') as HTMLDivElement, +}; -// eslint-disable-next-line no-console -const log = (...msg) => console.log(...msg); -const status = (msg) => { if (fps.element) fps.element.innerText = msg; }; +const fps = { detect: 0, draw: 0 }; + +const log = (...msg) => { + dom.log.innerText += msg.join(' ') + '\n'; + // eslint-disable-next-line no-console + console.log(...msg); +}; +const status = (msg) => { + dom.fps.innerText = msg; +}; +const perf = (msg) => { + dom.perf.innerText = 'performance: ' + JSON.stringify(msg).replace(/"|{|}/g, '').replace(/,/g, ' | '); +}; async function webCam() { status('starting webcam...'); const options = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } }; const stream: MediaStream = await navigator.mediaDevices.getUserMedia(options); - const ready = new Promise((resolve) => { video.onloadeddata = () => resolve(true); }); - video.srcObject = stream; - video.play(); + const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); }); + dom.video.srcObject = stream; + dom.video.play(); await ready; - canvas.width = video.videoWidth; - canvas.height = video.videoHeight; + dom.canvas.width = dom.video.videoWidth; + dom.canvas.height = dom.video.videoHeight; const track: MediaStreamTrack = stream.getVideoTracks()[0]; const capabilities: MediaTrackCapabilities = track.getCapabilities(); const settings: MediaTrackSettings = track.getSettings(); const constraints: MediaTrackConstraints = track.getConstraints(); - log('video:', video.videoWidth, video.videoHeight, { stream, track, 
settings, constraints, capabilities }); - canvas.onclick = () => { - if (video.paused) video.play(); - else video.pause(); + log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities }); + dom.canvas.onclick = () => { + if (dom.video.paused) dom.video.play(); + else dom.video.pause(); }; } async function detectionLoop() { const t0 = human.now(); - if (!video.paused) result = await human.detect(video); + if (!dom.video.paused) { + result = await human.detect(dom.video); + } const t1 = human.now(); fps.detect = 1000 / (t1 - t0); requestAnimationFrame(detectionLoop); @@ -52,21 +73,26 @@ async function detectionLoop() { async function drawLoop() { const t0 = human.now(); - if (!video.paused) { + if (!dom.video.paused) { const interpolated = await human.next(result); - await human.draw.canvas(video, canvas); - await human.draw.all(canvas, interpolated); + await human.draw.canvas(dom.video, dom.canvas); + await human.draw.all(dom.canvas, interpolated); + perf(interpolated.performance); } const t1 = human.now(); fps.draw = 1000 / (t1 - t0); - status(video.paused ? 'paused' : `fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect / ${fps.draw.toFixed(1).padStart(5, ' ')} draw`); + status(dom.video.paused ? 'paused' : `fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect / ${fps.draw.toFixed(1).padStart(5, ' ')} draw`); requestAnimationFrame(drawLoop); } async function main() { + log('human version:', human.version, 'tfjs:', human.tf.version_core); + log('platform:', human.env.platform, 'agent:', human.env.agent); + human.env.perfadd = true; status('loading...'); await human.load(); status('initializing...'); + log('backend:', human.tf.getBackend(), 'available:', human.env.backends); await human.warmup(); await webCam(); await detectionLoop(); diff --git a/src/face/face.ts b/src/face/face.ts index d733f3bf..deecf525 100644 --- a/src/face/face.ts +++ b/src/face/face.ts @@ -92,7 +92,7 @@ export const detectFace = async (parent /* instance of human */, input: Tensor): parent.state = 'run:description'; timeStamp = now(); descRes = parent.config.face.description.enabled ? await faceres.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null; - parent.performance.embedding = env.perfadd ? (parent.performance.embedding || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp); + parent.performance.description = env.perfadd ? (parent.performance.description || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp); } parent.analyze('End Description:'); diff --git a/src/human.ts b/src/human.ts index 7b589942..0c0ea2b4 100644 --- a/src/human.ts +++ b/src/human.ts @@ -1,6 +1,10 @@ /** * Human main module + * @default Human Library + * @summary * @author + * @copyright + * @license MIT */ // module imports @@ -150,7 +154,7 @@ export class Human { this.#numTensors = 0; this.#analyzeMemoryLeaks = false; this.#checkSanity = false; - this.performance = { backend: 0, load: 0, image: 0, frames: 0, cached: 0, changed: 0, total: 0, draw: 0 }; + this.performance = {}; this.events = (typeof EventTarget !== 'undefined') ? new EventTarget() : undefined; // object that contains all initialized models this.models = new models.Models(); @@ -310,7 +314,7 @@ export class Human { } const current = Math.trunc(now() - timeStamp); - if (current > (this.performance.load as number || 0)) this.performance.load = this.env.perfadd ? 
(this.performance.load || 0) + current : current; + if (current > (this.performance.loadModels as number || 0)) this.performance.loadModels = this.env.perfadd ? (this.performance.loadModels || 0) + current : current; } // emit event @@ -335,8 +339,12 @@ export class Human { * @param userConfig?: {@link Config} * @returns result: {@link Result} */ - async warmup(userConfig?: Partial): Promise { - return warmups.warmup(this, userConfig) as Promise; + async warmup(userConfig?: Partial) { + const t0 = now(); + const res = await warmups.warmup(this, userConfig); + const t1 = now(); + this.performance.warmup = Math.trunc(t1 - t0); + return res; } /** Main detection method @@ -379,7 +387,7 @@ export class Human { this.state = 'image'; const img = image.process(input, this.config) as { canvas: HTMLCanvasElement | OffscreenCanvas, tensor: Tensor }; this.process = img; - this.performance.image = this.env.perfadd ? (this.performance.image || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp); + this.performance.inputProcess = this.env.perfadd ? (this.performance.inputProcess || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp); this.analyze('Get Image:'); if (!img.tensor) { @@ -391,11 +399,11 @@ export class Human { timeStamp = now(); this.config.skipAllowed = await image.skip(this.config, img.tensor); - if (!this.performance.frames) this.performance.frames = 0; - if (!this.performance.cached) this.performance.cached = 0; - (this.performance.frames as number)++; - if (this.config.skipAllowed) this.performance.cached++; - this.performance.changed = this.env.perfadd ? (this.performance.changed || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp); + if (!this.performance.totalFrames) this.performance.totalFrames = 0; + if (!this.performance.cachedFrames) this.performance.cachedFrames = 0; + (this.performance.totalFrames as number)++; + if (this.config.skipAllowed) this.performance.cachedFrames++; + this.performance.inputCheck = this.env.perfadd ? (this.performance.inputCheck || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp); this.analyze('Check Changed:'); // prepare where to store model results @@ -454,7 +462,7 @@ export class Human { } this.analyze('End Hand:'); - // run nanodet + // run object detection this.analyze('Start Object:'); this.state = 'detect:object'; if (this.config.async) { @@ -483,7 +491,7 @@ export class Human { else if (this.performance.gesture) delete this.performance.gesture; } - this.performance.total = Math.trunc(now() - timeStart); + this.performance.total = this.env.perfadd ? 
(this.performance.total || 0) + Math.trunc(now() - timeStart) : Math.trunc(now() - timeStart); const shape = this.process?.tensor?.shape || []; this.result = { face: faceRes as FaceResult[], diff --git a/src/tfjs/backend.ts b/src/tfjs/backend.ts index d60fa31d..83dbed4c 100644 --- a/src/tfjs/backend.ts +++ b/src/tfjs/backend.ts @@ -99,7 +99,7 @@ export async function check(instance, force = false) { // wait for ready tf.enableProdMode(); await tf.ready(); - instance.performance.backend = Math.trunc(now() - timeStamp); + instance.performance.initBackend = Math.trunc(now() - timeStamp); instance.config.backend = tf.getBackend(); env.updateBackend(); // update env on backend init diff --git a/src/util/interpolate.ts b/src/util/interpolate.ts index a01500d8..31f4f438 100644 --- a/src/util/interpolate.ts +++ b/src/util/interpolate.ts @@ -9,8 +9,10 @@ import * as moveNetCoords from '../body/movenetcoords'; import * as blazePoseCoords from '../body/blazeposecoords'; import * as efficientPoseCoords from '../body/efficientposecoords'; import { now } from './util'; +import { env } from './env'; const bufferedResult: Result = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 }; +let interpolateTime = 0; export function calc(newResult: Result, config: Config): Result { const t0 = now(); @@ -163,7 +165,8 @@ export function calc(newResult: Result, config: Config): Result { // append interpolation performance data const t1 = now(); - if (newResult.performance) bufferedResult.performance = { ...newResult.performance, interpolate: Math.round(t1 - t0) }; + interpolateTime = env.perfadd ? interpolateTime + Math.round(t1 - t0) : Math.round(t1 - t0); + if (newResult.performance) bufferedResult.performance = { ...newResult.performance, interpolate: interpolateTime }; return bufferedResult; }
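
Note (not part of the patch): taken together, the hunks above rename several human.performance counters (backend -> initBackend, load -> loadModels, image -> inputProcess, changed -> inputCheck, frames/cached -> totalFrames/cachedFrames, embedding -> description), add a warmup counter, and make most timings respect env.perfadd, i.e. accumulate onto the previous value instead of overwriting it. The TypeScript sketch below shows how a consumer might read the new counters; it assumes the published '@vladmandic/human' package exposes the same default Human export as the demo's custom ESM bundle, and the profile() helper is hypothetical.

// Sketch only, not part of the patch: exercises the counters introduced above.
import Human from '@vladmandic/human'; // the demo imports '../../dist/human.custom.esm.js', noted as equivalent

const human = new Human({ modelBasePath: '../../models', backend: 'humangl', async: true });

async function profile(video: HTMLVideoElement) {
  human.env.perfadd = true;                       // accumulate counters across calls instead of overwriting them
  await human.load();                             // records performance.loadModels (previously performance.load)
  await human.warmup();                           // new counter: performance.warmup
  const result = await human.detect(video);       // populates counters such as initBackend, inputProcess, inputCheck, description, total
  const interpolated = await human.next(result);  // appends performance.interpolate, accumulated when perfadd is set
  // totalFrames counts processed frames; cachedFrames counts frames where detection was skipped
  console.log('performance:', interpolated.performance);
}

The accumulation pattern used throughout the patch is counter = env.perfadd ? (counter || 0) + elapsed : elapsed, so with perfadd disabled each counter still reflects only the most recent call, matching the previous behavior.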