mirror of https://github.com/vladmandic/human
modularize human class and add model validation
parent afd01e287f
commit 8862bb1277
@ -11,6 +11,8 @@

### **HEAD -> main** 2021/09/12 mandic00@live.com

- add dynamic kernel op detection
- added human.env diagnostic class
- minor typos
- release candidate
- parametrize face config
@ -47,7 +47,7 @@ Check out [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) ap

- [**Code Repository**](https://github.com/vladmandic/human)
- [**NPM Package**](https://www.npmjs.com/package/@vladmandic/human)
- [**Issues Tracker**](https://github.com/vladmandic/human/issues)
-- [**TypeDoc API Specification: Human**](https://vladmandic.github.io/human/typedoc/classes/Human.html)
+- [**TypeDoc API Specification**](https://vladmandic.github.io/human/typedoc/classes/Human.html)
- [**Change Log**](https://github.com/vladmandic/human/blob/main/CHANGELOG.md)
- [**Current To-do List**](https://github.com/vladmandic/human/blob/main/TODO.md)
(multiple generated file diffs suppressed: too large or lines too long)
@ -6,6 +6,5 @@ Source code of the `Human` library

- Compiled typings are present in `/types`
- Extracted API specification is present in `/typedoc`

For details on how to build the `Human` library, see the Wiki:

-- [**Build Process**](https://github.com/vladmandic/human/wiki/Build-Process)
+[**Build Process**](https://github.com/vladmandic/human/wiki/Build-Process)
+[**TypeDoc API Specification**](https://vladmandic.github.io/human/typedoc/classes/Human.html)
src/human.ts (340 lines changed)
@ -6,7 +6,6 @@ import { log, now, mergeDeep } from './helpers';

import { Config, defaults } from './config';
import { Result, FaceResult, HandResult, BodyResult, ObjectResult, GestureResult } from './result';
import * as tf from '../dist/tfjs.esm.js';
-import * as backend from './tfjs/backend';
import * as models from './models';
import * as face from './face';
import * as facemesh from './blazeface/facemesh';
@ -24,9 +23,10 @@ import * as image from './image/image';

import * as draw from './draw/draw';
import * as persons from './persons';
import * as interpolate from './interpolate';
-import * as sample from './sample';
import * as env from './env';
+import * as backend from './tfjs/backend';
import * as app from '../package.json';
+import * as warmups from './warmup';
import { Tensor, GraphModel } from './tfjs/types';

// export types
@ -86,8 +86,6 @@ export class Human {

 * - Progresses through: 'config', 'check', 'backend', 'load', 'run:<model>', 'idle'
 */
state: string;
-/** process input and return tensor and canvas */
-image: typeof image.process;
/** currently processed image tensor and canvas */
process: { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement | null };
/** @internal: Instance of TensorFlow/JS used by Human
@ -151,9 +149,7 @@ export class Human {

#numTensors: number;
#analyzeMemoryLeaks: boolean;
#checkSanity: boolean;
-#firstRun: boolean;
-#lastInputSum: number;
-#lastCacheDiff: number;
+initial: boolean;

// definition end
@ -176,18 +172,17 @@ export class Human {

this.#numTensors = 0;
this.#analyzeMemoryLeaks = false;
this.#checkSanity = false;
-this.#firstRun = true;
-this.#lastCacheDiff = 0;
+this.initial = true;
this.performance = { backend: 0, load: 0, image: 0, frames: 0, cached: 0, changed: 0, total: 0, draw: 0 };
this.events = new EventTarget();
// object that contains all initialized models
this.models = {
-  face: null,
+  face: null, // array of models
  posenet: null,
  blazepose: null,
  efficientpose: null,
  movenet: null,
-  handpose: null,
+  handpose: null, // array of models
  age: null,
  gender: null,
  emotion: null,
@ -200,14 +195,12 @@ export class Human {

this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [] };
-// export access to image this.processing
-// @ts-ignore eslint-typescript cannot correctly infer type in anonymous function
-this.image = (input: Input) => image.process(input, this.config);
this.process = { tensor: null, canvas: null };
// export raw access to underlying models
this.faceTriangulation = facemesh.triangulation;
this.faceUVMap = facemesh.uvmap;
// include platform info
-this.#lastInputSum = 1;
-this.#emit('create');
+this.emit('create');
}

// helper function: measure tensor leak
@ -235,6 +228,13 @@ export class Human {

  return null;
}

+/** Process input and return canvas and tensor
+ *
+ * @param input: {@link Input}
+ * @returns { tensor, canvas }
+ */
+image = (input: Input) => image.process(input, this.config);
+
/** Similarity method calculates similarity between two provided face descriptors (face embeddings)
 * - Calculation is based on normalized Minkowski distance between
 *
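For context, the relocated `image` method and the `similarity` method documented above can be combined; a minimal sketch (the `embedding` result field and the input variables are assumptions for illustration, not part of this commit):

```ts
// Sketch: compare two faces via descriptors; assumes face description is
// enabled in config and that results carry an `embedding` array
import Human from '@vladmandic/human';

async function compareFaces(input1: HTMLImageElement, input2: HTMLImageElement): Promise<number> {
  const human = new Human();
  const res1 = await human.detect(input1);
  const res2 = await human.detect(input2);
  const desc1 = res1.face?.[0]?.embedding;
  const desc2 = res2.face?.[0]?.embedding;
  // similarity is based on normalized Minkowski distance between descriptors
  return (desc1 && desc2) ? human.similarity(desc1, desc2) : 0;
}
```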
@ -290,12 +290,11 @@ export class Human {

const count = Object.values(this.models).filter((model) => model).length;
if (userConfig) this.config = mergeDeep(this.config, userConfig) as Config;

-if (this.#firstRun) { // print version info on first run and check for correct backend setup
+if (this.initial) { // print version info on first run and check for correct backend setup
  if (this.config.debug) log(`version: ${this.version}`);
  if (this.config.debug) log(`tfjs version: ${this.tf.version_core}`);
  // if (this.config.debug) log('environment:', this.env);

-await this.#checkBackend();
+await backend.check(this);
await tf.ready();
if (this.env.browser) {
  if (this.config.debug) log('configuration:', this.config);
  if (this.config.debug) log('tf flags:', this.tf.ENV.flags);
@ -304,123 +303,22 @@ export class Human {

await models.load(this); // actually loads models

-if (this.#firstRun) { // print memory stats on first run
-  if (this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors');
-  this.#firstRun = false;
-}
+if (this.initial && this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors'); // print memory stats on first run
+this.initial = false;

const loaded = Object.values(this.models).filter((model) => model).length;
-if (loaded !== count) this.#emit('load');
+if (loaded !== count) { // number of loaded models changed
+  await models.validate(this); // validate kernel ops used by model against current backend
+  this.emit('load');
+}

const current = Math.trunc(now() - timeStamp);
if (current > (this.performance.load as number || 0)) this.performance.load = current;
}

-// emit event
-/** @hidden */
-#emit = (event: string) => this.events?.dispatchEvent(new Event(event));
-
-// check if backend needs initialization if it changed
-/** @hidden */
-#checkBackend = async () => {
-  if (this.#firstRun || (this.config.backend && (this.config.backend.length > 0) && (this.tf.getBackend() !== this.config.backend))) {
-    const timeStamp = now();
-    this.state = 'backend';
-    /* force backend reload
-    if (this.config.backend in tf.engine().registry) {
-      const backendFactory = tf.findBackendFactory(this.config.backend);
-      tf.removeBackend(this.config.backend);
-      tf.registerBackend(this.config.backend, backendFactory);
-    } else {
-      log('Backend not registered:', this.config.backend);
-    }
-    */
-
-    if (this.config.backend && this.config.backend.length > 0) {
-      // detect web worker
-      // @ts-ignore ignore missing type for WorkerGlobalScope as that is the point
-      if (typeof window === 'undefined' && typeof WorkerGlobalScope !== 'undefined' && this.config.debug) {
-        log('running inside web worker');
-      }
-
-      // force browser vs node backend
-      if (this.env.browser && this.config.backend === 'tensorflow') {
-        log('override: backend set to tensorflow while running in browser');
-        this.config.backend = 'humangl';
-      }
-      if (this.env.node && (this.config.backend === 'webgl' || this.config.backend === 'humangl')) {
-        log(`override: backend set to ${this.config.backend} while running in nodejs`);
-        this.config.backend = 'tensorflow';
-      }
-
-      // handle webgpu
-      if (this.env.browser && this.config.backend === 'webgpu') {
-        if (typeof navigator === 'undefined' || typeof navigator['gpu'] === 'undefined') {
-          log('override: backend set to webgpu but browser does not support webgpu');
-          this.config.backend = 'humangl';
-        } else {
-          const adapter = await navigator['gpu'].requestAdapter();
-          if (this.config.debug) log('enumerated webgpu adapter:', adapter);
-        }
-      }
-
-      // check available backends
-      if (this.config.backend === 'humangl') backend.register();
-      const available = Object.keys(this.tf.engine().registryFactory);
-      if (this.config.debug) log('available backends:', available);
-
-      if (!available.includes(this.config.backend)) {
-        log(`error: backend ${this.config.backend} not found in registry`);
-        this.config.backend = this.env.node ? 'tensorflow' : 'humangl';
-        log(`override: setting backend ${this.config.backend}`);
-      }
-
-      if (this.config.debug) log('setting backend:', this.config.backend);
-
-      // handle wasm
-      if (this.config.backend === 'wasm') {
-        if (this.config.debug) log('wasm path:', this.config.wasmPath);
-        if (typeof this.tf?.setWasmPaths !== 'undefined') this.tf.setWasmPaths(this.config.wasmPath);
-        else throw new Error('Human: WASM backend is not loaded');
-        const simd = await this.tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');
-        const mt = await this.tf.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT');
-        if (this.config.debug) log(`wasm execution: ${simd ? 'SIMD' : 'no SIMD'} ${mt ? 'multithreaded' : 'singlethreaded'}`);
-        if (this.config.debug && !simd) log('warning: wasm simd support is not enabled');
-      }
-
-      // handle humangl
-      try {
-        await this.tf.setBackend(this.config.backend);
-      } catch (err) {
-        log('error: cannot set backend:', this.config.backend, err);
-      }
-    }
-
-    // handle webgl & humangl
-    if (this.tf.getBackend() === 'humangl') {
-      this.tf.ENV.set('CHECK_COMPUTATION_FOR_ERRORS', false);
-      this.tf.ENV.set('WEBGL_CPU_FORWARD', true);
-      this.tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', false);
-      this.tf.ENV.set('WEBGL_USE_SHAPES_UNIFORMS', true);
-      // if (!this.config.object.enabled) this.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true); // safe to use 16bit precision
-      if (typeof this.config['deallocate'] !== 'undefined' && this.config['deallocate']) { // hidden param
-        log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', true);
-        this.tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', 0);
-      }
-      // @ts-ignore getGPGPUContext only exists on WebGL backend
-      const gl = await this.tf.backend().getGPGPUContext().gl;
-      if (this.config.debug) log(`gl version:${gl.getParameter(gl.VERSION)} renderer:${gl.getParameter(gl.RENDERER)}`);
-    }
-
-    // wait for ready
-    this.tf.enableProdMode();
-    await this.tf.ready();
-    this.performance.backend = Math.trunc(now() - timeStamp);
-    this.config.backend = this.tf.getBackend();
-
-    env.get(); // update env on backend init
-    this.env = env.env;
-  }
-}
+emit = (event: string) => this.events?.dispatchEvent(new Event(event));
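Making `emit` public (backed by the `events` EventTarget created in the constructor) means consumers can subscribe with the standard DOM API; a short sketch using the event names dispatched in this commit:

```ts
// Sketch: listen for lifecycle events ('create', 'load', 'image', 'detect', 'warmup')
const human = new Human();
human.events.addEventListener('load', () => console.log('models loaded'));
human.events.addEventListener('detect', () => console.log('detection complete'));
```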
/**
 * Runs interpolation using last known result and returns smoothed result
@ -431,43 +329,20 @@ export class Human {

 */
next = (result?: Result) => interpolate.calc(result || this.result) as Result;

-// check if input changed sufficiently to trigger new detections
-/** @hidden */
-#skipFrame = async (input: Tensor) => {
-  if (this.config.cacheSensitivity === 0) return false;
-  const resizeFact = 32;
-  if (!input.shape[1] || !input.shape[2]) return false;
-  const reduced: Tensor = tf.image.resizeBilinear(input, [Math.trunc(input.shape[1] / resizeFact), Math.trunc(input.shape[2] / resizeFact)]);
-  // use tensor sum
-  /*
-  const sumT = this.tf.sum(reduced);
-  const sum = await sumT.data()[0] as number;
-  sumT.dispose();
-  */
-  // use js loop sum, faster than uploading tensor to gpu calculating and downloading back
-  const reducedData = await reduced.data(); // raw image rgb array
-  let sum = 0;
-  for (let i = 0; i < reducedData.length / 3; i++) sum += reducedData[3 * i + 2]; // look only at green value of each pixel
-
-  reduced.dispose();
-  const diff = 100 * (Math.max(sum, this.#lastInputSum) / Math.min(sum, this.#lastInputSum) - 1);
-  this.#lastInputSum = sum;
-  // if previous frame was skipped, skip this frame if changed more than cacheSensitivity
-  // if previous frame was not skipped, then look for cacheSensitivity or difference larger than one in previous frame to avoid resetting cache in subsequent frames unnecessarily
-  const skipFrame = diff < Math.max(this.config.cacheSensitivity, this.#lastCacheDiff);
-  // if difference is above 10x threshold, don't use last value to force reset cache for significant change of scenes or images
-  this.#lastCacheDiff = diff > 10 * this.config.cacheSensitivity ? 0 : diff;
-  // console.log('skipFrame', skipFrame, this.config.cacheSensitivity, diff);
-  return skipFrame;
-}
+/** Warmup method pre-initializes all configured models for faster inference
+ * - can take significant time on startup
+ * - only used for `webgl` and `humangl` backends
+ * @param userConfig?: {@link Config}
+ */
+warmup = (userConfig?: Partial<Config>) => warmups.warmup(this, userConfig) as Promise<Result | { error }>

/** Main detection method
 * - Analyze configuration: {@link Config}
 * - Pre-this.process input: {@link Input}
 * - Run inference for all configured models
- * - this.process and return result: {@link Result}
+ * - Process and return result: {@link Result}
 *
- * @param input: Input
+ * @param input: {@link Input}
 * @param userConfig?: {@link Config}
 * @returns result: {@link Result}
 */
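The `next` method pairs naturally with `detect` in a render loop; a sketch (the draw step is an assumed placeholder):

```ts
// Sketch: run full detection, then draw the temporally interpolated result
async function loop(human: Human, video: HTMLVideoElement) {
  await human.detect(video);   // runs models, updates human.result
  const smooth = human.next(); // interpolated copy of the last known result
  // render `smooth` to an overlay canvas here
  requestAnimationFrame(() => loop(human, video));
}
```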
@ -491,19 +366,20 @@ export class Human {

const timeStart = now();

-// configure backend
-await this.#checkBackend();
+// configure backend if needed
+await backend.check(this);

// load models if enabled
await this.load();

timeStamp = now();
this.process = image.process(input, this.config);
+const inputTensor = this.process.tensor;
this.performance.image = Math.trunc(now() - timeStamp);
this.analyze('Get Image:');

// run segmentation prethis.processing
-if (this.config.segmentation.enabled && this.process && this.process.tensor) {
+if (this.config.segmentation.enabled && this.process && inputTensor) {
  this.analyze('Start Segmentation:');
  this.state = 'run:segmentation';
  timeStamp = now();
@ -512,21 +388,21 @@ export class Human {

if (elapsedTime > 0) this.performance.segmentation = elapsedTime;
if (this.process.canvas) {
  // replace input
-  tf.dispose(this.process.tensor);
+  tf.dispose(inputTensor);
  this.process = image.process(this.process.canvas, this.config);
}
this.analyze('End Segmentation:');
}

-if (!this.process || !this.process.tensor) {
+if (!this.process || !inputTensor) {
  log('could not convert input to tensor');
  resolve({ error: 'could not convert input to tensor' });
  return;
}
-this.#emit('image');
+this.emit('image');

timeStamp = now();
-this.config.skipFrame = await this.#skipFrame(this.process.tensor);
+this.config.skipFrame = await image.skip(this, inputTensor);
if (!this.performance.frames) this.performance.frames = 0;
if (!this.performance.cached) this.performance.cached = 0;
(this.performance.frames as number)++;
@ -543,12 +419,12 @@ export class Human {

// run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
if (this.config.async) {
-  faceRes = this.config.face.enabled ? face.detectFace(this, this.process.tensor) : [];
+  faceRes = this.config.face.enabled ? face.detectFace(this, inputTensor) : [];
  if (this.performance.face) delete this.performance.face;
} else {
  this.state = 'run:face';
  timeStamp = now();
-  faceRes = this.config.face.enabled ? await face.detectFace(this, this.process.tensor) : [];
+  faceRes = this.config.face.enabled ? await face.detectFace(this, inputTensor) : [];
  elapsedTime = Math.trunc(now() - timeStamp);
  if (elapsedTime > 0) this.performance.face = elapsedTime;
}
@ -556,18 +432,18 @@ export class Human {

// run body: can be posenet, blazepose, efficientpose, movenet
this.analyze('Start Body:');
if (this.config.async) {
-  if (this.config.body.modelPath?.includes('posenet')) bodyRes = this.config.body.enabled ? posenet.predict(this.process.tensor, this.config) : [];
-  else if (this.config.body.modelPath?.includes('blazepose')) bodyRes = this.config.body.enabled ? blazepose.predict(this.process.tensor, this.config) : [];
-  else if (this.config.body.modelPath?.includes('efficientpose')) bodyRes = this.config.body.enabled ? efficientpose.predict(this.process.tensor, this.config) : [];
-  else if (this.config.body.modelPath?.includes('movenet')) bodyRes = this.config.body.enabled ? movenet.predict(this.process.tensor, this.config) : [];
+  if (this.config.body.modelPath?.includes('posenet')) bodyRes = this.config.body.enabled ? posenet.predict(inputTensor, this.config) : [];
+  else if (this.config.body.modelPath?.includes('blazepose')) bodyRes = this.config.body.enabled ? blazepose.predict(inputTensor, this.config) : [];
+  else if (this.config.body.modelPath?.includes('efficientpose')) bodyRes = this.config.body.enabled ? efficientpose.predict(inputTensor, this.config) : [];
+  else if (this.config.body.modelPath?.includes('movenet')) bodyRes = this.config.body.enabled ? movenet.predict(inputTensor, this.config) : [];
  if (this.performance.body) delete this.performance.body;
} else {
  this.state = 'run:body';
  timeStamp = now();
-  if (this.config.body.modelPath?.includes('posenet')) bodyRes = this.config.body.enabled ? await posenet.predict(this.process.tensor, this.config) : [];
-  else if (this.config.body.modelPath?.includes('blazepose')) bodyRes = this.config.body.enabled ? await blazepose.predict(this.process.tensor, this.config) : [];
-  else if (this.config.body.modelPath?.includes('efficientpose')) bodyRes = this.config.body.enabled ? await efficientpose.predict(this.process.tensor, this.config) : [];
-  else if (this.config.body.modelPath?.includes('movenet')) bodyRes = this.config.body.enabled ? await movenet.predict(this.process.tensor, this.config) : [];
+  if (this.config.body.modelPath?.includes('posenet')) bodyRes = this.config.body.enabled ? await posenet.predict(inputTensor, this.config) : [];
+  else if (this.config.body.modelPath?.includes('blazepose')) bodyRes = this.config.body.enabled ? await blazepose.predict(inputTensor, this.config) : [];
+  else if (this.config.body.modelPath?.includes('efficientpose')) bodyRes = this.config.body.enabled ? await efficientpose.predict(inputTensor, this.config) : [];
+  else if (this.config.body.modelPath?.includes('movenet')) bodyRes = this.config.body.enabled ? await movenet.predict(inputTensor, this.config) : [];
  elapsedTime = Math.trunc(now() - timeStamp);
  if (elapsedTime > 0) this.performance.body = elapsedTime;
}
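As the branches above show, the body model is selected purely by substring match on `config.body.modelPath`; a config sketch (the model filename is an assumed example):

```ts
// Sketch: switch the body model by pointing modelPath at a different graph model;
// 'movenet-lightning.json' is an assumed filename for illustration
const human = new Human({
  body: { enabled: true, modelPath: 'movenet-lightning.json' },
});
```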
@ -576,12 +452,12 @@ export class Human {

// run handpose
this.analyze('Start Hand:');
if (this.config.async) {
-  handRes = this.config.hand.enabled ? handpose.predict(this.process.tensor, this.config) : [];
+  handRes = this.config.hand.enabled ? handpose.predict(inputTensor, this.config) : [];
  if (this.performance.hand) delete this.performance.hand;
} else {
  this.state = 'run:hand';
  timeStamp = now();
-  handRes = this.config.hand.enabled ? await handpose.predict(this.process.tensor, this.config) : [];
+  handRes = this.config.hand.enabled ? await handpose.predict(inputTensor, this.config) : [];
  elapsedTime = Math.trunc(now() - timeStamp);
  if (elapsedTime > 0) this.performance.hand = elapsedTime;
}
@ -590,14 +466,14 @@ export class Human {

// run nanodet
this.analyze('Start Object:');
if (this.config.async) {
-  if (this.config.object.modelPath?.includes('nanodet')) objectRes = this.config.object.enabled ? nanodet.predict(this.process.tensor, this.config) : [];
-  else if (this.config.object.modelPath?.includes('centernet')) objectRes = this.config.object.enabled ? centernet.predict(this.process.tensor, this.config) : [];
+  if (this.config.object.modelPath?.includes('nanodet')) objectRes = this.config.object.enabled ? nanodet.predict(inputTensor, this.config) : [];
+  else if (this.config.object.modelPath?.includes('centernet')) objectRes = this.config.object.enabled ? centernet.predict(inputTensor, this.config) : [];
  if (this.performance.object) delete this.performance.object;
} else {
  this.state = 'run:object';
  timeStamp = now();
-  if (this.config.object.modelPath?.includes('nanodet')) objectRes = this.config.object.enabled ? await nanodet.predict(this.process.tensor, this.config) : [];
-  else if (this.config.object.modelPath?.includes('centernet')) objectRes = this.config.object.enabled ? await centernet.predict(this.process.tensor, this.config) : [];
+  if (this.config.object.modelPath?.includes('nanodet')) objectRes = this.config.object.enabled ? await nanodet.predict(inputTensor, this.config) : [];
+  else if (this.config.object.modelPath?.includes('centernet')) objectRes = this.config.object.enabled ? await centernet.predict(inputTensor, this.config) : [];
  elapsedTime = Math.trunc(now() - timeStamp);
  if (elapsedTime > 0) this.performance.object = elapsedTime;
}
@ -631,111 +507,13 @@ export class Human {

};

// finally dispose input tensor
-tf.dispose(this.process.tensor);
+tf.dispose(inputTensor);

// log('Result:', result);
-this.#emit('detect');
+this.emit('detect');
resolve(this.result);
});
}

-/** @hidden */
-#warmupBitmap = async () => {
-  const b64toBlob = (base64: string, type = 'application/octet-stream') => fetch(`data:${type};base64,${base64}`).then((res) => res.blob());
-  let blob;
-  let res;
-  switch (this.config.warmup) {
-    case 'face': blob = await b64toBlob(sample.face); break;
-    case 'full': blob = await b64toBlob(sample.body); break;
-    default: blob = null;
-  }
-  if (blob) {
-    const bitmap = await createImageBitmap(blob);
-    res = await this.detect(bitmap, this.config);
-    bitmap.close();
-  }
-  return res;
-}
-
-/** @hidden */
-#warmupCanvas = async () => new Promise((resolve) => {
-  let src;
-  let size = 0;
-  switch (this.config.warmup) {
-    case 'face':
-      size = 256;
-      src = 'data:image/jpeg;base64,' + sample.face;
-      break;
-    case 'full':
-    case 'body':
-      size = 1200;
-      src = 'data:image/jpeg;base64,' + sample.body;
-      break;
-    default:
-      src = null;
-  }
-  // src = encodeURI('../assets/human-sample-upper.jpg');
-  const img = new Image();
-  img.onload = async () => {
-    const canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(size, size) : document.createElement('canvas');
-    canvas.width = img.naturalWidth;
-    canvas.height = img.naturalHeight;
-    const ctx = canvas.getContext('2d');
-    ctx?.drawImage(img, 0, 0);
-    // const data = ctx?.getImageData(0, 0, canvas.height, canvas.width);
-    const res = await this.detect(canvas, this.config);
-    resolve(res);
-  };
-  if (src) img.src = src;
-  else resolve(null);
-});
-
-/** @hidden */
-#warmupNode = async () => {
-  const atob = (str: string) => Buffer.from(str, 'base64');
-  let img;
-  if (this.config.warmup === 'face') img = atob(sample.face);
-  if (this.config.warmup === 'body' || this.config.warmup === 'full') img = atob(sample.body);
-  if (!img) return null;
-  let res;
-  if (typeof tf['node'] !== 'undefined') {
-    const data = tf['node'].decodeJpeg(img);
-    const expanded = data.expandDims(0);
-    this.tf.dispose(data);
-    // log('Input:', expanded);
-    res = await this.detect(expanded, this.config);
-    this.tf.dispose(expanded);
-  } else {
-    if (this.config.debug) log('Warmup tfjs-node not loaded');
-    /*
-    const input = await canvasJS.loadImage(img);
-    const canvas = canvasJS.createCanvas(input.width, input.height);
-    const ctx = canvas.getContext('2d');
-    ctx.drawImage(img, 0, 0, input.width, input.height);
-    res = await this.detect(input, this.config);
-    */
-  }
-  return res;
-}
-
-/** Warmup method pre-initializes all configured models for faster inference
- * - can take significant time on startup
- * - only used for `webgl` and `humangl` backends
- * @param userConfig?: Config
- */
-async warmup(userConfig?: Partial<Config>): Promise<Result | { error }> {
-  const t0 = now();
-  if (userConfig) this.config = mergeDeep(this.config, userConfig) as Config;
-  if (!this.config.warmup || this.config.warmup === 'none') return { error: 'null' };
-  let res;
-  if (typeof createImageBitmap === 'function') res = await this.#warmupBitmap();
-  else if (typeof Image !== 'undefined') res = await this.#warmupCanvas();
-  else res = await this.#warmupNode();
-  const t1 = now();
-  if (this.config.debug) log('Warmup', this.config.warmup, Math.round(t1 - t0), 'ms', res);
-  this.#emit('warmup');
-  return res;
-}
}

/**
src/image/image.ts

@ -177,3 +177,33 @@ export function process(input: Input, config: Config): { tensor: Tensor | null,

const canvas = config.filter.return ? outCanvas : null;
return { tensor, canvas };
}
+
+let lastInputSum = 0;
+let lastCacheDiff = 1;
+export async function skip(instance, input: Tensor) {
+  if (instance.config.cacheSensitivity === 0) return false;
+  const resizeFact = 32;
+  if (!input.shape[1] || !input.shape[2]) return false;
+  const reduced: Tensor = tf.image.resizeBilinear(input, [Math.trunc(input.shape[1] / resizeFact), Math.trunc(input.shape[2] / resizeFact)]);
+  // use tensor sum
+  /*
+  const sumT = this.tf.sum(reduced);
+  const sum = await sumT.data()[0] as number;
+  sumT.dispose();
+  */
+  // use js loop sum, faster than uploading tensor to gpu calculating and downloading back
+  const reducedData = await reduced.data(); // raw image rgb array
+  let sum = 0;
+  for (let i = 0; i < reducedData.length / 3; i++) sum += reducedData[3 * i + 2]; // look only at green value of each pixel
+
+  reduced.dispose();
+  const diff = 100 * (Math.max(sum, lastInputSum) / Math.min(sum, lastInputSum) - 1);
+  lastInputSum = sum;
+  // if previous frame was skipped, skip this frame if changed more than cacheSensitivity
+  // if previous frame was not skipped, then look for cacheSensitivity or difference larger than one in previous frame to avoid resetting cache in subsequent frames unnecessarily
+  const skipFrame = diff < Math.max(instance.config.cacheSensitivity, lastCacheDiff);
+  // if difference is above 10x threshold, don't use last value to force reset cache for significant change of scenes or images
+  lastCacheDiff = diff > 10 * instance.config.cacheSensitivity ? 0 : diff;
+  // console.log('skipFrame', skipFrame, this.config.cacheSensitivity, diff);
+  return skipFrame;
+}
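The module-level `lastInputSum`/`lastCacheDiff` pair replaces the former private class fields, but tuning is unchanged: everything hangs off `config.cacheSensitivity`. A sketch (scale interpretation derived from the percent-based `diff` computed above):

```ts
// Sketch: diff above is computed in percent, so 0.5 means "re-detect when the
// downscaled green channel changes by more than 0.5%"; 0 disables frame caching
const human = new Human({ cacheSensitivity: 0.5 });
```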
src/models.ts

@ -1,3 +1,5 @@

import { log } from './helpers';
+import { GraphModel } from './tfjs/types';
import * as facemesh from './blazeface/facemesh';
import * as faceres from './faceres/faceres';
import * as emotion from './emotion/emotion';
@ -59,3 +61,39 @@ export async function load(instance) {

  // if (instance.config.face.enabled && instance.config.face.agegenderrace.enabled && !instance.models.agegenderrace) instance.models.agegenderrace = await agegenderrace.load(instance.config);
  }
}
+
+export async function validate(instance) {
+  interface Op { name: string, category: string, op: string }
+  const simpleOps = ['const', 'placeholder', 'noop', 'pad', 'squeeze', 'add', 'sub', 'mul', 'div'];
+  for (const defined of Object.keys(instance.models)) {
+    if (instance.models[defined]) { // check if model is loaded
+      let models: GraphModel[] = [];
+      if (Array.isArray(instance.models[defined])) models = instance.models[defined].map((model) => (model.executor ? model : model.model));
+      else models = [instance.models[defined]];
+      for (const model of models) {
+        const ops: string[] = [];
+        // @ts-ignore // executor is a private method
+        const executor = model?.executor;
+        if (executor) {
+          for (const kernel of Object.values(executor.graph.nodes)) {
+            const op = (kernel as Op).op.toLowerCase();
+            if (!ops.includes(op)) ops.push(op);
+          }
+        }
+        const missing: string[] = [];
+        for (const op of ops) {
+          if (!simpleOps.includes(op) // exclude simple ops
+            && !instance.env.kernels.includes(op) // check actual kernel ops
+            && !instance.env.kernels.includes(op.replace('_', '')) // check variation without _
+            && !instance.env.kernels.includes(op.replace('native', '')) // check standard variation
+            && !instance.env.kernels.includes(op.replace('v2', ''))) { // check non-versioned variation
+            missing.push(op);
+          }
+        }
+        if (!executor && instance.config.debug) log('model executor not found:', defined);
+        if (missing.length > 0 && instance.config.debug) log('model validation:', defined, missing);
+      }
+    }
+  }
+  // log.data('ops used by model:', ops);
+}
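`validate()` checks ops used by each model graph against kernels registered for the active backend; the kernel list itself comes straight from TFJS. A standalone sketch of that lookup:

```ts
// Sketch: enumerate kernel ops registered for the current TFJS backend,
// the same data validate() compares model graph ops against
import * as tf from '@tensorflow/tfjs';

const kernels = tf.getKernelsForBackend(tf.getBackend())
  .map((kernel) => kernel.kernelName.toLowerCase());
console.log(`backend ${tf.getBackend()} registers ${kernels.length} kernel ops`);
```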
src/tfjs/backend.ts

@ -1,92 +1,107 @@

-/**
- * Custom TFJS backend for Human based on WebGL
- * Not used by default
- */
-
-import { log } from '../helpers';
+import { log, now } from '../helpers';
+import * as humangl from './humangl';
+import * as env from '../env';
import * as tf from '../../dist/tfjs.esm.js';

-export const config = {
-  name: 'humangl',
-  priority: 99,
-  canvas: <null | OffscreenCanvas | HTMLCanvasElement>null,
-  gl: <null | WebGL2RenderingContext>null,
-  width: 1024,
-  height: 1024,
-  extensions: <string[]> [],
-  webGLattr: { // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.2
-    alpha: false,
-    antialias: false,
-    premultipliedAlpha: false,
-    preserveDrawingBuffer: false,
-    depth: false,
-    stencil: false,
-    failIfMajorPerformanceCaveat: false,
-    desynchronized: true,
-  },
-};
+export async function check(instance) {
+  if (instance.initial || (instance.config.backend && (instance.config.backend.length > 0) && (tf.getBackend() !== instance.config.backend))) {
+    const timeStamp = now();
+    instance.state = 'backend';
+    /* force backend reload
+    if (instance.config.backend in tf.engine().registry) {
+      const backendFactory = tf.findBackendFactory(instance.config.backend);
+      tf.removeBackend(instance.config.backend);
+      tf.registerBackend(instance.config.backend, backendFactory);
+    } else {
+      log('Backend not registered:', instance.config.backend);
+    }
+    */

-function extensions(): void {
-  /*
-  https://www.khronos.org/registry/webgl/extensions/
-  https://webglreport.com/?v=2
-  */
-  const gl = config.gl;
-  if (!gl) return;
-  config.extensions = gl.getSupportedExtensions() as string[];
-  // gl.getExtension('KHR_parallel_shader_compile');
-}
+    if (instance.config.backend && instance.config.backend.length > 0) {
+      // detect web worker
+      // @ts-ignore ignore missing type for WorkerGlobalScope as that is the point
+      if (typeof window === 'undefined' && typeof WorkerGlobalScope !== 'undefined' && instance.config.debug) {
+        log('running inside web worker');
+      }

-/**
- * Registers custom WebGL2 backend to be used by Human library
- *
- * @returns void
- */
-export function register(): void {
-  if (!tf.findBackend(config.name)) {
-    // log('backend registration:', config.name);
-    try {
-      config.canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(config.width, config.height) : document.createElement('canvas');
-    } catch (err) {
-      log('error: cannot create canvas:', err);
-      return;
-    }
+      // force browser vs node backend
+      if (env.env.browser && instance.config.backend === 'tensorflow') {
+        log('override: backend set to tensorflow while running in browser');
+        instance.config.backend = 'humangl';
+      }
+      if (env.env.node && (instance.config.backend === 'webgl' || instance.config.backend === 'humangl')) {
+        log(`override: backend set to ${instance.config.backend} while running in nodejs`);
+        instance.config.backend = 'tensorflow';
+      }
+
+      // handle webgpu
+      if (env.env.browser && instance.config.backend === 'webgpu') {
+        if (typeof navigator === 'undefined' || typeof navigator['gpu'] === 'undefined') {
+          log('override: backend set to webgpu but browser does not support webgpu');
+          instance.config.backend = 'humangl';
+        } else {
+          const adapter = await navigator['gpu'].requestAdapter();
+          if (instance.config.debug) log('enumerated webgpu adapter:', adapter);
+        }
+      }
+
+      // check available backends
+      if (instance.config.backend === 'humangl') humangl.register();
+      const available = Object.keys(tf.engine().registryFactory);
+      if (instance.config.debug) log('available backends:', available);
+
+      if (!available.includes(instance.config.backend)) {
+        log(`error: backend ${instance.config.backend} not found in registry`);
+        instance.config.backend = env.env.node ? 'tensorflow' : 'humangl';
+        log(`override: setting backend ${instance.config.backend}`);
+      }
+
+      if (instance.config.debug) log('setting backend:', instance.config.backend);
+
+      // handle wasm
+      if (instance.config.backend === 'wasm') {
+        if (instance.config.debug) log('wasm path:', instance.config.wasmPath);
+        if (typeof tf?.setWasmPaths !== 'undefined') await tf.setWasmPaths(instance.config.wasmPath);
+        else throw new Error('Human: WASM backend is not loaded');
+        const simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');
+        const mt = await tf.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT');
+        if (instance.config.debug) log(`wasm execution: ${simd ? 'SIMD' : 'no SIMD'} ${mt ? 'multithreaded' : 'singlethreaded'}`);
+        if (instance.config.debug && !simd) log('warning: wasm simd support is not enabled');
+      }
+
+      try {
+        await tf.setBackend(instance.config.backend);
+        await tf.ready();
+      } catch (err) {
+        log('error: cannot set backend:', instance.config.backend, err);
+      }
+    }
-    try {
-      config.gl = config.canvas.getContext('webgl2', config.webGLattr) as WebGL2RenderingContext;
-    } catch (err) {
-      log('error: cannot get WebGL2 context:', err);
-      return;
-    }
+
+    // handle webgl & humangl
+    if (tf.getBackend() === 'humangl') {
+      tf.ENV.set('CHECK_COMPUTATION_FOR_ERRORS', false);
+      tf.ENV.set('WEBGL_CPU_FORWARD', true);
+      tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', false);
+      tf.ENV.set('WEBGL_USE_SHAPES_UNIFORMS', true);
+      // if (!instance.config.object.enabled) tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true); // safe to use 16bit precision
+      if (typeof instance.config['deallocate'] !== 'undefined' && instance.config['deallocate']) { // hidden param
+        log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', true);
+        tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', 0);
+      }
+      // @ts-ignore getGPGPUContext only exists on WebGL backend
+      const gl = await tf.backend().getGPGPUContext().gl;
+      if (instance.config.debug) log(`gl version:${gl.getParameter(gl.VERSION)} renderer:${gl.getParameter(gl.RENDERER)}`);
+    }
-    try {
-      tf.setWebGLContext(2, config.gl);
-    } catch (err) {
-      log('error: cannot set WebGL2 context:', err);
-      return;
-    }
-    try {
-      const ctx = new tf.GPGPUContext(config.gl);
-      tf.registerBackend(config.name, () => new tf.MathBackendWebGL(ctx), config.priority);
-    } catch (err) {
-      log('error: cannot register WebGL backend:', err);
-      return;
-    }
-    try {
-      const kernels = tf.getKernelsForBackend('webgl');
-      kernels.forEach((kernelConfig) => {
-        const newKernelConfig = { ...kernelConfig, backendName: config.name };
-        tf.registerKernel(newKernelConfig);
-      });
-    } catch (err) {
-      log('error: cannot update WebGL backend registration:', err);
-      return;
-    }
-    try {
-      tf.ENV.set('WEBGL_VERSION', 2);
-    } catch (err) {
-      log('error: cannot set WebGL backend flags:', err);
-      return;
-    }
-    extensions();
-    log('backend registered:', config.name);
+
+    // wait for ready
+    tf.enableProdMode();
+    await tf.ready();
+    instance.performance.backend = Math.trunc(now() - timeStamp);
+    instance.config.backend = tf.getBackend();
+
+    env.get(); // update env on backend init
+    instance.env = env.env;
  }
}
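`check()` resolves the final backend from config plus environment, overriding invalid choices; a configuration sketch (the wasm path value is an assumed placeholder):

```ts
// Sketch: request a backend via config; check() falls back to 'tensorflow'
// under nodejs and 'humangl' in browsers when the request cannot be honored
const human = new Human({
  backend: 'wasm',
  wasmPath: '/assets/tfjs-wasm/', // assumed location of the tfjs wasm binaries
});
```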
src/tfjs/humangl.ts (new file)

@ -0,0 +1,92 @@

/**
 * Custom TFJS backend for Human based on WebGL
 * Not used by default
 */

import { log } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';

export const config = {
  name: 'humangl',
  priority: 99,
  canvas: <null | OffscreenCanvas | HTMLCanvasElement>null,
  gl: <null | WebGL2RenderingContext>null,
  width: 1024,
  height: 1024,
  extensions: <string[]> [],
  webGLattr: { // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.2
    alpha: false,
    antialias: false,
    premultipliedAlpha: false,
    preserveDrawingBuffer: false,
    depth: false,
    stencil: false,
    failIfMajorPerformanceCaveat: false,
    desynchronized: true,
  },
};

function extensions(): void {
  /*
  https://www.khronos.org/registry/webgl/extensions/
  https://webglreport.com/?v=2
  */
  const gl = config.gl;
  if (!gl) return;
  config.extensions = gl.getSupportedExtensions() as string[];
  // gl.getExtension('KHR_parallel_shader_compile');
}

/**
 * Registers custom WebGL2 backend to be used by Human library
 *
 * @returns void
 */
export function register(): void {
  if (!tf.findBackend(config.name)) {
    // log('backend registration:', config.name);
    try {
      config.canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(config.width, config.height) : document.createElement('canvas');
    } catch (err) {
      log('error: cannot create canvas:', err);
      return;
    }
    try {
      config.gl = config.canvas.getContext('webgl2', config.webGLattr) as WebGL2RenderingContext;
    } catch (err) {
      log('error: cannot get WebGL2 context:', err);
      return;
    }
    try {
      tf.setWebGLContext(2, config.gl);
    } catch (err) {
      log('error: cannot set WebGL2 context:', err);
      return;
    }
    try {
      const ctx = new tf.GPGPUContext(config.gl);
      tf.registerBackend(config.name, () => new tf.MathBackendWebGL(ctx), config.priority);
    } catch (err) {
      log('error: cannot register WebGL backend:', err);
      return;
    }
    try {
      const kernels = tf.getKernelsForBackend('webgl');
      kernels.forEach((kernelConfig) => {
        const newKernelConfig = { ...kernelConfig, backendName: config.name };
        tf.registerKernel(newKernelConfig);
      });
    } catch (err) {
      log('error: cannot update WebGL backend registration:', err);
      return;
    }
    try {
      tf.ENV.set('WEBGL_VERSION', 2);
    } catch (err) {
      log('error: cannot set WebGL backend flags:', err);
      return;
    }
    extensions();
    log('backend registered:', config.name);
  }
}
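With registration extracted into this module, selecting the backend by name is all a consumer needs, since `backend.check()` calls `humangl.register()` on demand; a sketch:

```ts
// Sketch: pick the custom WebGL2 backend by name; registration happens
// inside backend.check() during load/detect
const human = new Human({ backend: 'humangl' });
human.load().then(() => console.log('active backend:', human.config.backend));
```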
src/warmup.ts (new file)

@ -0,0 +1,102 @@

import { log, now, mergeDeep } from './helpers';
import * as sample from './sample';
import * as tf from '../dist/tfjs.esm.js';
import { Config } from './config';
import { Result } from './result';

async function warmupBitmap(instance) {
  const b64toBlob = (base64: string, type = 'application/octet-stream') => fetch(`data:${type};base64,${base64}`).then((res) => res.blob());
  let blob;
  let res;
  switch (instance.config.warmup) {
    case 'face': blob = await b64toBlob(sample.face); break;
    case 'full': blob = await b64toBlob(sample.body); break;
    default: blob = null;
  }
  if (blob) {
    const bitmap = await createImageBitmap(blob);
    res = await instance.detect(bitmap, instance.config);
    bitmap.close();
  }
  return res;
}

async function warmupCanvas(instance) {
  return new Promise((resolve) => {
    let src;
    let size = 0;
    switch (instance.config.warmup) {
      case 'face':
        size = 256;
        src = 'data:image/jpeg;base64,' + sample.face;
        break;
      case 'full':
      case 'body':
        size = 1200;
        src = 'data:image/jpeg;base64,' + sample.body;
        break;
      default:
        src = null;
    }
    // src = encodeURI('../assets/human-sample-upper.jpg');
    const img = new Image();
    img.onload = async () => {
      const canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(size, size) : document.createElement('canvas');
      canvas.width = img.naturalWidth;
      canvas.height = img.naturalHeight;
      const ctx = canvas.getContext('2d');
      ctx?.drawImage(img, 0, 0);
      // const data = ctx?.getImageData(0, 0, canvas.height, canvas.width);
      const res = await instance.detect(canvas, instance.config);
      resolve(res);
    };
    if (src) img.src = src;
    else resolve(null);
  });
}

async function warmupNode(instance) {
  const atob = (str: string) => Buffer.from(str, 'base64');
  let img;
  if (instance.config.warmup === 'face') img = atob(sample.face);
  if (instance.config.warmup === 'body' || instance.config.warmup === 'full') img = atob(sample.body);
  if (!img) return null;
  let res;
  if (typeof tf['node'] !== 'undefined') {
    const data = tf['node'].decodeJpeg(img);
    const expanded = data.expandDims(0);
    instance.tf.dispose(data);
    // log('Input:', expanded);
    res = await instance.detect(expanded, instance.config);
    instance.tf.dispose(expanded);
  } else {
    if (instance.config.debug) log('Warmup tfjs-node not loaded');
    /*
    const input = await canvasJS.loadImage(img);
    const canvas = canvasJS.createCanvas(input.width, input.height);
    const ctx = canvas.getContext('2d');
    ctx.drawImage(img, 0, 0, input.width, input.height);
    res = await instance.detect(input, instance.config);
    */
  }
  return res;
}

/** Warmup method pre-initializes all configured models for faster inference
 * - can take significant time on startup
 * - only used for `webgl` and `humangl` backends
 * @param userConfig?: Config
 */
export async function warmup(instance, userConfig?: Partial<Config>): Promise<Result | { error }> {
  const t0 = now();
  if (userConfig) instance.config = mergeDeep(instance.config, userConfig) as Config;
  if (!instance.config.warmup || instance.config.warmup === 'none') return { error: 'null' };
  let res;
  if (typeof createImageBitmap === 'function') res = await warmupBitmap(instance);
  else if (typeof Image !== 'undefined') res = await warmupCanvas(instance);
  else res = await warmupNode(instance);
  const t1 = now();
  if (instance.config.debug) log('Warmup', instance.config.warmup, Math.round(t1 - t0), 'ms');
  instance.emit('warmup');
  return res;
}
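Warmup is now a plain module function with the class method as a thin delegate, so usage is unchanged; a sketch:

```ts
// Sketch: warmup runs one detection pass over an embedded sample image so
// shader/kernel compilation cost is paid before the first real frame
async function init(): Promise<void> {
  const human = new Human({ warmup: 'face' });
  await human.load();
  const res = await human.warmup(); // resolves to Result or { error }
  console.log('warmup result:', res);
}
```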
test/build.log (2230 lines changed; diff suppressed because it is too large)
test/test-main.js

@ -66,7 +66,7 @@ async function testInstance(human) {

// if (!human.tf) human.tf = tf;
log('info', 'human version:', human.version);
-log('info', 'platform:', human.sysinfo.platform, 'agent:', human.sysinfo.agent);
+log('info', 'platform:', human.env.platform, 'agent:', human.env.agent);
log('info', 'tfjs version:', human.tf.version.tfjs);

await human.load();
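The test now reads diagnostics from the new `human.env` class instead of the removed `human.sysinfo`; a sketch of the fields this commit relies on:

```ts
// Sketch: env fields referenced elsewhere in this commit (platform, agent, kernels)
const human = new Human();
console.log(human.env.platform, human.env.agent);
console.log('kernel ops registered for active backend:', human.env.kernels.length);
```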
@ -132,6 +132,7 @@ async function test(Human, inputConfig) {

}
const t0 = process.hrtime.bigint();
const human = new Human(config);
+// await human.tf.ready();
await testInstance(human);
config.warmup = 'none';
await testWarmup(human, 'default');
@ -158,6 +159,8 @@ async function test(Human, inputConfig) {

testDetect(second, 'samples/ai-face.jpg', 'default'),
testDetect(human, 'samples/ai-body.jpg', 'default'),
testDetect(second, 'samples/ai-body.jpg', 'default'),
+testDetect(human, 'samples/ai-upper.jpg', 'default'),
+testDetect(second, 'samples/ai-upper.jpg', 'default'),
]);
const t1 = process.hrtime.bigint();
log('info', 'test complete:', Math.trunc(Number(t1 - t0) / 1000 / 1000), 'ms');
test/test-node-wasm.js

@ -1,3 +1,5 @@

+const tf = require('@tensorflow/tfjs/dist/tf.node.js'); // wasm backend requires tfjs to be loaded first
+const wasm = require('@tensorflow/tfjs-backend-wasm/dist/tf-backend-wasm.node.js'); // wasm backend does not get auto-loaded in nodejs
const Human = require('../dist/human.node-wasm.js').default;
const test = require('./test-main.js').test;
@ -10,17 +12,20 @@ const config = {

async: false,
face: {
  enabled: true,
-  detector: { enabled: true, rotation: true },
+  detector: { enabled: true, rotation: false },
  mesh: { enabled: true },
  iris: { enabled: true },
  description: { enabled: true },
  emotion: { enabled: true },
},
-hand: { enabled: true },
+hand: { enabled: true, rotation: false },
body: { enabled: true },
-object: { enabled: false },
+object: { enabled: true },
segmentation: { enabled: true },
filter: { enabled: false },
};

+// @ts-ignore // in nodejs+wasm must set explicitly before using human
+wasm.setWasmPaths(config.wasmPath); tf.setBackend('wasm');

test(Human, config);
@ -14,7 +14,7 @@ const config = {

description: { enabled: true },
emotion: { enabled: true },
},
-hand: { enabled: true },
+hand: { enabled: true, rotation: true },
body: { enabled: true },
object: { enabled: true },
segmentation: { enabled: true },
test/test.log (313 lines changed)
@ -1,120 +1,193 @@

2021-09-10 20:56:59 INFO:  @vladmandic/human version 2.1.5
2021-09-10 20:56:59 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.5.0
2021-09-10 20:56:59 INFO:  tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
2021-09-10 20:56:59 INFO:  test-node.js start
2021-09-10 20:57:00 STATE: test-node.js passed: create human
2021-09-10 20:57:00 INFO:  test-node.js human version: 2.1.5
2021-09-10 20:57:00 INFO:  test-node.js platform: linux x64 agent: NodeJS v16.5.0
2021-09-10 20:57:00 INFO:  test-node.js tfjs version: 3.9.0
2021-09-10 20:57:00 STATE: test-node.js passed: set backend: tensorflow
2021-09-10 20:57:00 STATE: test-node.js passed: load models
2021-09-10 20:57:00 STATE: test-node.js result: defined models: 14 loaded models: 7
2021-09-10 20:57:00 STATE: test-node.js passed: warmup: none default
2021-09-10 20:57:01 STATE: test-node.js passed: warmup: face default
2021-09-10 20:57:01 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4}
2021-09-10 20:57:01 DATA:  test-node.js result: performance: load: 300 total: 1156
2021-09-10 20:57:02 STATE: test-node.js passed: warmup: body default
2021-09-10 20:57:02 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2021-09-10 20:57:02 DATA:  test-node.js result: performance: load: 300 total: 1114
2021-09-10 20:57:02 INFO:  test-node.js test body variants
2021-09-10 20:57:03 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-10 20:57:04 STATE: test-node.js passed: detect: samples/ai-body.jpg posenet
2021-09-10 20:57:04 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-09-10 20:57:04 DATA:  test-node.js result: performance: load: 300 total: 733
2021-09-10 20:57:05 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-10 20:57:05 STATE: test-node.js passed: detect: samples/ai-body.jpg movenet
2021-09-10 20:57:05 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2021-09-10 20:57:05 DATA:  test-node.js result: performance: load: 300 total: 243
2021-09-10 20:57:06 STATE: test-node.js passed: detect: random default
2021-09-10 20:57:06 DATA:  test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
2021-09-10 20:57:06 DATA:  test-node.js result: performance: load: 300 total: 646
2021-09-10 20:57:06 INFO:  test-node.js test: first instance
2021-09-10 20:57:06 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-10 20:57:07 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-09-10 20:57:07 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
2021-09-10 20:57:07 DATA:  test-node.js result: performance: load: 300 total: 874
2021-09-10 20:57:07 INFO:  test-node.js test: second instance
2021-09-10 20:57:07 STATE: test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
2021-09-10 20:57:08 STATE: test-node.js passed: detect: samples/ai-upper.jpg default
2021-09-10 20:57:08 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
2021-09-10 20:57:08 DATA:  test-node.js result: performance: load: 2 total: 769
2021-09-10 20:57:08 INFO:  test-node.js test: concurrent
2021-09-10 20:57:08 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-09-10 20:57:08 STATE: test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
2021-09-10 20:57:09 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-10 20:57:10 STATE: test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-10 20:57:13 STATE: test-node.js passed: detect: samples/ai-face.jpg default
2021-09-10 20:57:13 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":17}
2021-09-10 20:57:13 DATA:  test-node.js result: performance: load: 300 total: 3047
2021-09-10 20:57:13 STATE: test-node.js passed: detect: samples/ai-face.jpg default
2021-09-10 20:57:13 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":17}
2021-09-10 20:57:13 DATA:  test-node.js result: performance: load: 2 total: 3047
2021-09-10 20:57:14 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-09-10 20:57:14 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2021-09-10 20:57:14 DATA:  test-node.js result: performance: load: 300 total: 3784
2021-09-10 20:57:14 STATE: test-node.js passed: detect: samples/ai-body.jpg default
2021-09-10 20:57:14 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2021-09-10 20:57:14 DATA:  test-node.js result: performance: load: 2 total: 3784
2021-09-10 20:57:14 INFO:  test-node.js test complete: 14000 ms
2021-09-10 20:57:14 INFO:  test-node-gpu.js start
2021-09-10 20:57:15 WARN:  test-node-gpu.js stderr: 2021-09-10 20:57:15.061099: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2021-09-10 20:57:15 WARN:  test-node-gpu.js stderr: 2021-09-10 20:57:15.219172: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2021-09-10 20:57:15 WARN:  test-node-gpu.js stderr: 2021-09-10 20:57:15.219207: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
2021-09-10 20:57:15 STATE: test-node-gpu.js passed: create human
2021-09-10 20:57:15 INFO:  test-node-gpu.js human version: 2.1.5
2021-09-10 20:57:15 INFO:  test-node-gpu.js platform: linux x64 agent: NodeJS v16.5.0
2021-09-10 20:57:15 INFO:  test-node-gpu.js tfjs version: 3.9.0
2021-09-10 20:57:15 STATE: test-node-gpu.js passed: set backend: tensorflow
2021-09-10 20:57:15 STATE: test-node-gpu.js passed: load models
2021-09-10 20:57:15 STATE: test-node-gpu.js result: defined models: 14 loaded models: 7
2021-09-10 20:57:15 STATE: test-node-gpu.js passed: warmup: none default
2021-09-10 20:57:16 STATE: test-node-gpu.js passed: warmup: face default
2021-09-10 20:57:16 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4}
2021-09-10 20:57:16 DATA:  test-node-gpu.js result: performance: load: 297 total: 1188
2021-09-10 20:57:17 STATE: test-node-gpu.js passed: warmup: body default
2021-09-10 20:57:17 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2021-09-10 20:57:17 DATA:  test-node-gpu.js result: performance: load: 297 total: 1103
2021-09-10 20:57:17 INFO:  test-node-gpu.js test body variants
2021-09-10 20:57:18 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-10 20:57:19 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg posenet
2021-09-10 20:57:19 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-09-10 20:57:19 DATA:  test-node-gpu.js result: performance: load: 297 total: 737
2021-09-10 20:57:20 STATE: test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
2021-09-10 20:57:20 STATE: test-node-gpu.js passed: detect: samples/ai-body.jpg movenet
2021-09-10 20:57:20 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
2021-09-10 20:57:20 DATA:  test-node-gpu.js result: performance: load: 297 total: 234
2021-09-10 20:57:21 STATE: test-node-gpu.js passed: detect: random default
|
||||
2021-09-10 20:57:21 [32mDATA: [39m test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
|
||||
2021-09-10 20:57:21 [32mDATA: [39m test-node-gpu.js result: performance: load: 297 total: 612
|
||||
2021-09-10 20:57:21 [36mINFO: [39m test-node-gpu.js test: first instance
|
||||
2021-09-10 20:57:21 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||
2021-09-10 20:57:22 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-09-10 20:57:22 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
|
||||
2021-09-10 20:57:22 [32mDATA: [39m test-node-gpu.js result: performance: load: 297 total: 832
|
||||
2021-09-10 20:57:22 [36mINFO: [39m test-node-gpu.js test: second instance
|
||||
2021-09-10 20:57:22 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||
2021-09-10 20:57:23 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-09-10 20:57:23 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
|
||||
2021-09-10 20:57:23 [32mDATA: [39m test-node-gpu.js result: performance: load: 6 total: 806
|
||||
2021-09-10 20:57:23 [36mINFO: [39m test-node-gpu.js test: concurrent
|
||||
2021-09-10 20:57:23 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||
2021-09-10 20:57:23 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||
2021-09-10 20:57:24 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-09-10 20:57:25 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-09-10 20:57:28 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-face.jpg default
|
||||
2021-09-10 20:57:28 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":17}
|
||||
2021-09-10 20:57:28 [32mDATA: [39m test-node-gpu.js result: performance: load: 297 total: 3213
|
||||
2021-09-10 20:57:28 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-face.jpg default
|
||||
2021-09-10 20:57:28 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":17}
|
||||
2021-09-10 20:57:28 [32mDATA: [39m test-node-gpu.js result: performance: load: 6 total: 3213
|
||||
2021-09-10 20:57:29 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-body.jpg default
|
||||
2021-09-10 20:57:29 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2021-09-10 20:57:29 [32mDATA: [39m test-node-gpu.js result: performance: load: 297 total: 3928
|
||||
2021-09-10 20:57:29 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-body.jpg default
|
||||
2021-09-10 20:57:29 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2021-09-10 20:57:29 [32mDATA: [39m test-node-gpu.js result: performance: load: 6 total: 3929
|
||||
2021-09-10 20:57:29 [36mINFO: [39m test-node-gpu.js test complete: 13994 ms
|
||||
2021-09-10 20:57:29 [36mINFO: [39m test-node-wasm.js start
|
||||
2021-09-10 20:57:29 [31mERROR:[39m test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
|
||||
2021-09-10 20:57:29 [31mERROR:[39m test-node-wasm.js aborting test
|
||||
2021-09-10 20:57:29 [36mINFO: [39m status: {"passed":46,"failed":1}
|
||||
2021-09-12 18:34:14 [36mINFO: [39m @vladmandic/human version 2.2.0
|
||||
2021-09-12 18:34:14 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v16.5.0
|
||||
2021-09-12 18:34:14 [36mINFO: [39m tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
|
||||
2021-09-12 18:34:14 [36mINFO: [39m test-node.js start
|
||||
2021-09-12 18:34:14 [35mSTATE:[39m test-node.js passed: create human
|
||||
2021-09-12 18:34:14 [36mINFO: [39m test-node.js human version: 2.2.0
|
||||
2021-09-12 18:34:14 [36mINFO: [39m test-node.js platform: linux x64 agent: NodeJS v16.5.0
|
||||
2021-09-12 18:34:14 [36mINFO: [39m test-node.js tfjs version: 3.9.0
|
||||
2021-09-12 18:34:15 [35mSTATE:[39m test-node.js passed: set backend: tensorflow
|
||||
2021-09-12 18:34:15 [35mSTATE:[39m test-node.js passed: load models
|
||||
2021-09-12 18:34:15 [35mSTATE:[39m test-node.js result: defined models: 14 loaded models: 7
|
||||
2021-09-12 18:34:15 [35mSTATE:[39m test-node.js passed: warmup: none default
|
||||
2021-09-12 18:34:16 [35mSTATE:[39m test-node.js passed: warmup: face default
|
||||
2021-09-12 18:34:16 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4}
|
||||
2021-09-12 18:34:16 [32mDATA: [39m test-node.js result: performance: load: 324 total: 1200
|
||||
2021-09-12 18:34:17 [35mSTATE:[39m test-node.js passed: warmup: body default
|
||||
2021-09-12 18:34:17 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2021-09-12 18:34:17 [32mDATA: [39m test-node.js result: performance: load: 324 total: 1096
|
||||
2021-09-12 18:34:17 [36mINFO: [39m test-node.js test body variants
|
||||
2021-09-12 18:34:18 [35mSTATE:[39m test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-09-12 18:34:19 [35mSTATE:[39m test-node.js passed: detect: samples/ai-body.jpg posenet
|
||||
2021-09-12 18:34:19 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
|
||||
2021-09-12 18:34:19 [32mDATA: [39m test-node.js result: performance: load: 324 total: 689
|
||||
2021-09-12 18:34:19 [35mSTATE:[39m test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-09-12 18:34:20 [35mSTATE:[39m test-node.js passed: detect: samples/ai-body.jpg movenet
|
||||
2021-09-12 18:34:20 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2021-09-12 18:34:20 [32mDATA: [39m test-node.js result: performance: load: 324 total: 231
|
||||
2021-09-12 18:34:20 [35mSTATE:[39m test-node.js passed: detect: random default
|
||||
2021-09-12 18:34:20 [32mDATA: [39m test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
|
||||
2021-09-12 18:34:20 [32mDATA: [39m test-node.js result: performance: load: 324 total: 625
|
||||
2021-09-12 18:34:20 [36mINFO: [39m test-node.js test: first instance
|
||||
2021-09-12 18:34:21 [35mSTATE:[39m test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||
2021-09-12 18:34:21 [35mSTATE:[39m test-node.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-09-12 18:34:21 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
|
||||
2021-09-12 18:34:21 [32mDATA: [39m test-node.js result: performance: load: 324 total: 833
|
||||
2021-09-12 18:34:21 [36mINFO: [39m test-node.js test: second instance
|
||||
2021-09-12 18:34:22 [35mSTATE:[39m test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||
2021-09-12 18:34:22 [35mSTATE:[39m test-node.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-09-12 18:34:22 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
|
||||
2021-09-12 18:34:22 [32mDATA: [39m test-node.js result: performance: load: 5 total: 339
|
||||
2021-09-12 18:34:22 [36mINFO: [39m test-node.js test: concurrent
|
||||
2021-09-12 18:34:22 [35mSTATE:[39m test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||
2021-09-12 18:34:22 [35mSTATE:[39m test-node.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||
2021-09-12 18:34:23 [35mSTATE:[39m test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-09-12 18:34:24 [35mSTATE:[39m test-node.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-09-12 18:34:24 [35mSTATE:[39m test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||
2021-09-12 18:34:25 [35mSTATE:[39m test-node.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||
2021-09-12 18:34:25 [35mSTATE:[39m test-node.js passed: detect: samples/ai-face.jpg default
|
||||
2021-09-12 18:34:25 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.92,"keypoints":10}
|
||||
2021-09-12 18:34:25 [32mDATA: [39m test-node.js result: performance: load: 5 total: 881
|
||||
2021-09-12 18:34:26 [35mSTATE:[39m test-node.js passed: detect: samples/ai-body.jpg default
|
||||
2021-09-12 18:34:26 [32mDATA: [39m test-node.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.71,"class":"person"} {"score":0.92,"keypoints":10}
|
||||
2021-09-12 18:34:26 [32mDATA: [39m test-node.js result: performance: load: 5 total: 881
|
||||
2021-09-12 18:34:26 [35mSTATE:[39m test-node.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-09-12 18:34:26 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
|
||||
2021-09-12 18:34:26 [32mDATA: [39m test-node.js result: performance: load: 5 total: 990
|
||||
2021-09-12 18:34:27 [35mSTATE:[39m test-node.js passed: detect: samples/ai-body.jpg default
|
||||
2021-09-12 18:34:27 [32mDATA: [39m test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":4}
|
||||
2021-09-12 18:34:27 [32mDATA: [39m test-node.js result: performance: load: 324 total: 2891
|
||||
2021-09-12 18:34:27 [35mSTATE:[39m test-node.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-09-12 18:34:27 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 2 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":4}
|
||||
2021-09-12 18:34:27 [32mDATA: [39m test-node.js result: performance: load: 324 total: 2891
|
||||
2021-09-12 18:34:28 [35mSTATE:[39m test-node.js passed: detect: samples/ai-face.jpg default
|
||||
2021-09-12 18:34:28 [32mDATA: [39m test-node.js result: face: 2 body: 1 hand: 0 gesture: 9 object: 1 person: 2 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":4}
|
||||
2021-09-12 18:34:28 [32mDATA: [39m test-node.js result: performance: load: 324 total: 3162
|
||||
2021-09-12 18:34:28 [36mINFO: [39m test-node.js test complete: 13509 ms
|
||||
2021-09-12 18:34:28 [36mINFO: [39m test-node-gpu.js start
|
||||
2021-09-12 18:34:28 [33mWARN: [39m test-node-gpu.js stderr: 2021-09-12 18:34:28.915334: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
|
||||
2021-09-12 18:34:28 [33mWARN: [39m test-node-gpu.js stderr: 2021-09-12 18:34:28.962912: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
|
||||
2021-09-12 18:34:28 [33mWARN: [39m test-node-gpu.js stderr: 2021-09-12 18:34:28.962948: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
|
||||
2021-09-12 18:34:29 [35mSTATE:[39m test-node-gpu.js passed: create human
|
||||
2021-09-12 18:34:29 [36mINFO: [39m test-node-gpu.js human version: 2.2.0
|
||||
2021-09-12 18:34:29 [36mINFO: [39m test-node-gpu.js platform: linux x64 agent: NodeJS v16.5.0
|
||||
2021-09-12 18:34:29 [36mINFO: [39m test-node-gpu.js tfjs version: 3.9.0
|
||||
2021-09-12 18:34:29 [35mSTATE:[39m test-node-gpu.js passed: set backend: tensorflow
|
||||
2021-09-12 18:34:29 [35mSTATE:[39m test-node-gpu.js passed: load models
|
||||
2021-09-12 18:34:29 [35mSTATE:[39m test-node-gpu.js result: defined models: 14 loaded models: 7
|
||||
2021-09-12 18:34:29 [35mSTATE:[39m test-node-gpu.js passed: warmup: none default
|
||||
2021-09-12 18:34:30 [35mSTATE:[39m test-node-gpu.js passed: warmup: face default
|
||||
2021-09-12 18:34:30 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4}
|
||||
2021-09-12 18:34:30 [32mDATA: [39m test-node-gpu.js result: performance: load: 287 total: 1009
|
||||
2021-09-12 18:34:31 [35mSTATE:[39m test-node-gpu.js passed: warmup: body default
|
||||
2021-09-12 18:34:31 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2021-09-12 18:34:31 [32mDATA: [39m test-node-gpu.js result: performance: load: 287 total: 1115
|
||||
2021-09-12 18:34:31 [36mINFO: [39m test-node-gpu.js test body variants
|
||||
2021-09-12 18:34:32 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-09-12 18:34:33 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-body.jpg posenet
|
||||
2021-09-12 18:34:33 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
|
||||
2021-09-12 18:34:33 [32mDATA: [39m test-node-gpu.js result: performance: load: 287 total: 734
|
||||
2021-09-12 18:34:33 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-09-12 18:34:34 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-body.jpg movenet
|
||||
2021-09-12 18:34:34 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2021-09-12 18:34:34 [32mDATA: [39m test-node-gpu.js result: performance: load: 287 total: 200
|
||||
2021-09-12 18:34:34 [35mSTATE:[39m test-node-gpu.js passed: detect: random default
|
||||
2021-09-12 18:34:34 [32mDATA: [39m test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
|
||||
2021-09-12 18:34:34 [32mDATA: [39m test-node-gpu.js result: performance: load: 287 total: 633
|
||||
2021-09-12 18:34:34 [36mINFO: [39m test-node-gpu.js test: first instance
|
||||
2021-09-12 18:34:35 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||
2021-09-12 18:34:36 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-09-12 18:34:36 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
|
||||
2021-09-12 18:34:36 [32mDATA: [39m test-node-gpu.js result: performance: load: 287 total: 838
|
||||
2021-09-12 18:34:36 [36mINFO: [39m test-node-gpu.js test: second instance
|
||||
2021-09-12 18:34:36 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||
2021-09-12 18:34:36 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-09-12 18:34:36 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
|
||||
2021-09-12 18:34:36 [32mDATA: [39m test-node-gpu.js result: performance: load: 11 total: 398
|
||||
2021-09-12 18:34:36 [36mINFO: [39m test-node-gpu.js test: concurrent
|
||||
2021-09-12 18:34:36 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||
2021-09-12 18:34:36 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||
2021-09-12 18:34:37 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-09-12 18:34:38 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-09-12 18:34:38 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||
2021-09-12 18:34:39 [35mSTATE:[39m test-node-gpu.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||
2021-09-12 18:34:39 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-face.jpg default
|
||||
2021-09-12 18:34:39 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.92,"keypoints":10}
|
||||
2021-09-12 18:34:39 [32mDATA: [39m test-node-gpu.js result: performance: load: 11 total: 877
|
||||
2021-09-12 18:34:40 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-body.jpg default
|
||||
2021-09-12 18:34:40 [32mDATA: [39m test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.71,"class":"person"} {"score":0.92,"keypoints":10}
|
||||
2021-09-12 18:34:40 [32mDATA: [39m test-node-gpu.js result: performance: load: 11 total: 877
|
||||
2021-09-12 18:34:40 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-09-12 18:34:40 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":10}
|
||||
2021-09-12 18:34:40 [32mDATA: [39m test-node-gpu.js result: performance: load: 11 total: 984
|
||||
2021-09-12 18:34:42 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-body.jpg default
|
||||
2021-09-12 18:34:42 [32mDATA: [39m test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":4}
|
||||
2021-09-12 18:34:42 [32mDATA: [39m test-node-gpu.js result: performance: load: 287 total: 3052
|
||||
2021-09-12 18:34:42 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-09-12 18:34:42 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 2 object: 1 person: 1 {"age":29.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.69,"keypoints":4}
|
||||
2021-09-12 18:34:42 [32mDATA: [39m test-node-gpu.js result: performance: load: 287 total: 3053
|
||||
2021-09-12 18:34:42 [35mSTATE:[39m test-node-gpu.js passed: detect: samples/ai-face.jpg default
|
||||
2021-09-12 18:34:42 [32mDATA: [39m test-node-gpu.js result: face: 2 body: 1 hand: 0 gesture: 9 object: 1 person: 2 {"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":4}
|
||||
2021-09-12 18:34:42 [32mDATA: [39m test-node-gpu.js result: performance: load: 287 total: 3324
|
||||
2021-09-12 18:34:42 [36mINFO: [39m test-node-gpu.js test complete: 13334 ms
|
||||
2021-09-12 18:34:42 [36mINFO: [39m test-node-wasm.js start
|
||||
2021-09-12 18:34:42 [35mSTATE:[39m test-node-wasm.js passed: model server: http://localhost:10030/models/
|
||||
2021-09-12 18:34:42 [35mSTATE:[39m test-node-wasm.js passed: create human
|
||||
2021-09-12 18:34:42 [36mINFO: [39m test-node-wasm.js human version: 2.2.0
|
||||
2021-09-12 18:34:42 [36mINFO: [39m test-node-wasm.js platform: linux x64 agent: NodeJS v16.5.0
|
||||
2021-09-12 18:34:42 [36mINFO: [39m test-node-wasm.js tfjs version: 3.9.0
|
||||
2021-09-12 18:34:43 [35mSTATE:[39m test-node-wasm.js passed: set backend: wasm
|
||||
2021-09-12 18:34:43 [35mSTATE:[39m test-node-wasm.js passed: load models
|
||||
2021-09-12 18:34:43 [35mSTATE:[39m test-node-wasm.js result: defined models: 14 loaded models: 7
|
||||
2021-09-12 18:34:43 [35mSTATE:[39m test-node-wasm.js passed: warmup: none default
|
||||
2021-09-12 18:34:43 [31mERROR:[39m test-node-wasm.js failed: warmup: face default
|
||||
2021-09-12 18:34:43 [31mERROR:[39m test-node-wasm.js failed: warmup: body default
|
||||
2021-09-12 18:34:43 [36mINFO: [39m test-node-wasm.js test body variants
|
||||
2021-09-12 18:34:45 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-09-12 18:34:46 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-body.jpg posenet
|
||||
2021-09-12 18:34:46 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"age":28.5,"gender":"female"} {} {"score":0.96,"keypoints":16}
|
||||
2021-09-12 18:34:46 [32mDATA: [39m test-node-wasm.js result: performance: load: 526 total: 1537
|
||||
2021-09-12 18:34:48 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-09-12 18:34:49 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-body.jpg movenet
|
||||
2021-09-12 18:34:49 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":17}
|
||||
2021-09-12 18:34:49 [32mDATA: [39m test-node-wasm.js result: performance: load: 526 total: 948
|
||||
2021-09-12 18:34:49 [35mSTATE:[39m test-node-wasm.js passed: detect: random default
|
||||
2021-09-12 18:34:49 [32mDATA: [39m test-node-wasm.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":0.92,"keypoints":17}
|
||||
2021-09-12 18:34:49 [32mDATA: [39m test-node-wasm.js result: performance: load: 526 total: 98
|
||||
2021-09-12 18:34:49 [36mINFO: [39m test-node-wasm.js test: first instance
|
||||
2021-09-12 18:34:49 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||
2021-09-12 18:34:50 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-09-12 18:34:50 [32mDATA: [39m test-node-wasm.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":0.69,"keypoints":10}
|
||||
2021-09-12 18:34:50 [32mDATA: [39m test-node-wasm.js result: performance: load: 526 total: 139
|
||||
2021-09-12 18:34:50 [36mINFO: [39m test-node-wasm.js test: second instance
|
||||
2021-09-12 18:34:50 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||
2021-09-12 18:34:50 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-09-12 18:34:50 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"age":28.5,"gender":"female"} {} {"score":0.69,"keypoints":10}
|
||||
2021-09-12 18:34:50 [32mDATA: [39m test-node-wasm.js result: performance: load: 3 total: 432
|
||||
2021-09-12 18:34:50 [36mINFO: [39m test-node-wasm.js test: concurrent
|
||||
2021-09-12 18:34:51 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||
2021-09-12 18:34:51 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-face.jpg [1,256,256,3]
|
||||
2021-09-12 18:34:52 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-09-12 18:34:54 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-body.jpg [1,1200,1200,3]
|
||||
2021-09-12 18:34:54 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||
2021-09-12 18:34:55 [35mSTATE:[39m test-node-wasm.js passed: load image: samples/ai-upper.jpg [1,720,688,3]
|
||||
2021-09-12 18:34:55 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-face.jpg default
|
||||
2021-09-12 18:34:55 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 0 person: 1 {"age":28.5,"gender":"female"} {} {"score":0.92,"keypoints":10}
|
||||
2021-09-12 18:34:55 [32mDATA: [39m test-node-wasm.js result: performance: load: 3 total: 849
|
||||
2021-09-12 18:34:56 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-body.jpg default
|
||||
2021-09-12 18:34:56 [32mDATA: [39m test-node-wasm.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":0.92,"keypoints":10}
|
||||
2021-09-12 18:34:56 [32mDATA: [39m test-node-wasm.js result: performance: load: 3 total: 849
|
||||
2021-09-12 18:34:56 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-09-12 18:34:56 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"age":28.5,"gender":"female"} {} {"score":0.69,"keypoints":10}
|
||||
2021-09-12 18:34:56 [32mDATA: [39m test-node-wasm.js result: performance: load: 3 total: 914
|
||||
2021-09-12 18:34:57 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-body.jpg default
|
||||
2021-09-12 18:34:57 [32mDATA: [39m test-node-wasm.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":0.92,"keypoints":10}
|
||||
2021-09-12 18:34:57 [32mDATA: [39m test-node-wasm.js result: performance: load: 526 total: 2551
|
||||
2021-09-12 18:34:57 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-upper.jpg default
|
||||
2021-09-12 18:34:57 [32mDATA: [39m test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"age":29.5,"gender":"female"} {} {"score":0.69,"keypoints":10}
|
||||
2021-09-12 18:34:57 [32mDATA: [39m test-node-wasm.js result: performance: load: 526 total: 2608
|
||||
2021-09-12 18:34:57 [35mSTATE:[39m test-node-wasm.js passed: detect: samples/ai-face.jpg default
|
||||
2021-09-12 18:34:57 [32mDATA: [39m test-node-wasm.js result: face: 2 body: 1 hand: 0 gesture: 9 object: 0 person: 2 {"age":23.6,"gender":"female"} {} {"score":0.47,"keypoints":4}
|
||||
2021-09-12 18:34:57 [32mDATA: [39m test-node-wasm.js result: performance: load: 526 total: 2816
|
||||
2021-09-12 18:34:57 [36mINFO: [39m test-node-wasm.js test complete: 15083 ms
|
||||
2021-09-12 18:34:58 [36mINFO: [39m status: {"passed":80,"failed":2}
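The passing flow above (create human → set backend → load models → warmup → load image → detect) maps to roughly the following Node usage. This is a minimal sketch, assuming `@vladmandic/human` and `@tensorflow/tfjs-node` are installed; the sample path is taken from the log and all config values are illustrative:

```ts
// minimal sketch of the Node test flow above; not the actual test harness
import * as fs from 'fs';
import * as tf from '@tensorflow/tfjs-node';
import Human from '@vladmandic/human';

async function run() {
  const human = new Human({ backend: 'tensorflow' }); // create human / set backend
  await human.load();                                 // load models
  await human.warmup();                               // warmup
  const buffer = fs.readFileSync('samples/ai-body.jpg');
  const tensor = tf.node.decodeImage(buffer, 3);      // load image as a tensor
  const result = await human.detect(tensor);          // detect
  if (!(result instanceof Error)) console.log(result.performance);
  tf.dispose(tensor);
}
run();
```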
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -5,7 +5,6 @@ import { Config } from './config';
import { Result } from './result';
import * as tf from '../dist/tfjs.esm.js';
import * as facemesh from './blazeface/facemesh';
import * as image from './image/image';
import * as draw from './draw/draw';
import * as env from './env';
import { Tensor, GraphModel } from './tfjs/types';
@@ -63,8 +62,6 @@ export declare class Human {
   * - Progresses through: 'config', 'check', 'backend', 'load', 'run:<model>', 'idle'
   */
  state: string;
  /** process input and return tensor and canvas */
  image: typeof image.process;
  /** currently processed image tensor and canvas */
  process: {
    tensor: Tensor | null;
@@ -128,6 +125,7 @@ export declare class Human {
  faceUVMap: typeof facemesh.uvmap;
  /** Performance object that contains values for all recently performed operations */
  performance: Record<string, number>;
  initial: boolean;
  /**
   * Creates instance of Human library that is further used for all operations
   * @param userConfig: {@link Config}
@@ -135,6 +133,15 @@ export declare class Human {
  constructor(userConfig?: Partial<Config>);
  /** @hidden */
  analyze: (...msg: string[]) => void;
  /** Process input and return canvas and tensor
   *
   * @param input: {@link Input}
   * @returns { tensor, canvas }
   */
  image: (input: Input) => {
    tensor: Tensor<import("@tensorflow/tfjs-core").Rank> | null;
    canvas: OffscreenCanvas | HTMLCanvasElement;
  };
  /** Similarity method calculates similarity between two provided face descriptors (face embeddings)
   * - Calculation is based on normalized Minkowski distance between
   *
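The `similarity` doc-comment is cut off by the hunk boundary; as a hedged illustration of how the descriptor comparison is typically driven (the `embedding` field comes from the published `FaceResult` type; the inputs are hypothetical):

```ts
// hedged sketch: compare face embeddings from two detection results
import Human from '@vladmandic/human';

const human = new Human();
async function compareFaces(imageA: any, imageB: any): Promise<number> { // imageA/imageB are hypothetical inputs
  const a = await human.detect(imageA);
  const b = await human.detect(imageB);
  if (a instanceof Error || b instanceof Error) return 0;
  const d1 = a.face[0]?.embedding;
  const d2 = b.face[0]?.embedding;
  return (d1 && d2) ? human.similarity(d1, d2) : 0; // 0..1, higher means more similar
}
```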
@@ -179,6 +186,8 @@ export declare class Human {
   * @param userConfig?: {@link Config}
   */
  load(userConfig?: Partial<Config>): Promise<void>;
  /** @hidden */
  emit: (event: string) => boolean;
  /**
   * Runs interpolation using the last known result and returns a smoothed result
   * Interpolation is based on time since the last known result, so it can be called independently
@@ -187,25 +196,25 @@ export declare class Human {
   * @returns result: {@link Result}
   */
  next: (result?: Result | undefined) => Result;
  /** Warmup method pre-initializes all configured models for faster inference
   * - can take significant time on startup
   * - only used for `webgl` and `humangl` backends
   * @param userConfig?: {@link Config}
   */
  warmup: (userConfig?: Partial<Config> | undefined) => Promise<Result | {
    error: any;
  }>;
  /** Main detection method
   * - Analyze configuration: {@link Config}
   * - Pre-this.process input: {@link Input}
   * - Run inference for all configured models
   * - this.process and return result: {@link Result}
   * - Process and return result: {@link Result}
   *
   * @param input: Input
   * @param input: {@link Input}
   * @param userConfig?: {@link Config}
   * @returns result: {@link Result}
   */
  detect(input: Input, userConfig?: Partial<Config>): Promise<Result | Error>;
  /** Warmup method pre-initializes all configured models for faster inference
   * - can take significant time on startup
   * - only used for `webgl` and `humangl` backends
   * @param userConfig?: Config
   */
  warmup(userConfig?: Partial<Config>): Promise<Result | {
    error: any;
  }>;
}
/**
 * Class Human is also available as default export
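Taken together, `detect()` and `next()` support the usual render-loop pattern: detect as fast as the model allows, draw interpolated results every frame. A hedged browser-side sketch; the element ids are hypothetical:

```ts
// hedged sketch: detect on a video element, draw time-interpolated results each frame
import Human from '@vladmandic/human';

const human = new Human();
const video = document.getElementById('video') as HTMLVideoElement;    // hypothetical element ids
const canvas = document.getElementById('output') as HTMLCanvasElement;

async function detectLoop() {
  await human.detect(video);             // updates human.result internally
  requestAnimationFrame(detectLoop);
}
async function drawLoop() {
  const smooth = human.next();           // interpolates the last known result based on elapsed time
  await human.draw.all(canvas, smooth);  // built-in draw helpers on the instance
  requestAnimationFrame(drawLoop);
}
human.load().then(() => { detectLoop(); drawLoop(); });
```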
@@ -8,4 +8,5 @@ export declare function process(input: Input, config: Config): {
  tensor: Tensor | null;
  canvas: OffscreenCanvas | HTMLCanvasElement;
};
export declare function skip(instance: any, input: Tensor): Promise<boolean>;
export {};
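These are the standalone declarations behind the `image` member shown earlier; a hedged sketch, assuming a `human` instance as in the previous sketches and a hypothetical `inputCanvas` element:

```ts
// hedged sketch: pre-process an input without running full detection
const processed = human.image(inputCanvas); // public wrapper over process(input, config)
console.log(processed.tensor?.shape);       // e.g. [1, height, width, 3]
processed.tensor?.dispose();                // assumption: caller releases the returned tensor
```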
@@ -3,3 +3,4 @@
 * @param userinstance.config?: {@link instance.config}
 */
export declare function load(instance: any): Promise<void>;
export declare function validate(instance: any): Promise<void>;
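`validate` is the new model-validation entry point this commit adds next to the existing loader. A hedged sketch of driving the pair directly (normally `human.load()` does this internally; the module path is illustrative, and the behavior described in the comment is an assumption based on the commit message and changelog):

```ts
// hedged sketch: load configured models, then validate them
import * as models from './models'; // illustrative relative path

async function loadAndValidate(instance: any): Promise<void> {
  await models.load(instance);      // load models enabled in instance.config
  await models.validate(instance);  // assumption: checks loaded models against
                                    // kernel ops available on the active backend
}
```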
@@ -1,29 +1 @@
/**
 * Custom TFJS backend for Human based on WebGL
 * Not used by default
 */
export declare const config: {
  name: string;
  priority: number;
  canvas: OffscreenCanvas | HTMLCanvasElement | null;
  gl: WebGL2RenderingContext | null;
  width: number;
  height: number;
  extensions: string[];
  webGLattr: {
    alpha: boolean;
    antialias: boolean;
    premultipliedAlpha: boolean;
    preserveDrawingBuffer: boolean;
    depth: boolean;
    stencil: boolean;
    failIfMajorPerformanceCaveat: boolean;
    desynchronized: boolean;
  };
};
/**
 * Registers custom WebGL2 backend to be used by Human library
 *
 * @returns void
 */
export declare function register(): void;
export declare function check(instance: any): Promise<void>;
@@ -0,0 +1,29 @@
/**
 * Custom TFJS backend for Human based on WebGL
 * Not used by default
 */
export declare const config: {
  name: string;
  priority: number;
  canvas: OffscreenCanvas | HTMLCanvasElement | null;
  gl: WebGL2RenderingContext | null;
  width: number;
  height: number;
  extensions: string[];
  webGLattr: {
    alpha: boolean;
    antialias: boolean;
    premultipliedAlpha: boolean;
    preserveDrawingBuffer: boolean;
    depth: boolean;
    stencil: boolean;
    failIfMajorPerformanceCaveat: boolean;
    desynchronized: boolean;
  };
};
/**
 * Registers custom WebGL2 backend to be used by Human library
 *
 * @returns void
 */
export declare function register(): void;
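The `register()` export is the opt-in hook for the custom `humangl` backend declarations moved into this new file. A hedged sketch of wiring it up in a browser build; the module path is illustrative:

```ts
// hedged sketch: register the custom WebGL2 backend before creating the instance
import * as humangl from './tfjs/humangl'; // illustrative path for the moved module
import Human from '@vladmandic/human';

humangl.register();                        // registers the 'humangl' backend with tfjs
const human = new Human({ backend: 'humangl' });
human.load().then(() => console.log('ready on backend:', human.config.backend));
```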
@@ -0,0 +1,10 @@
import { Config } from './config';
import { Result } from './result';
/** Warmup method pre-initializes all configured models for faster inference
 * - can take significant time on startup
 * - only used for `webgl` and `humangl` backends
 * @param userConfig?: Config
 */
export declare function warmup(instance: any, userConfig?: Partial<Config>): Promise<Result | {
  error: any;
}>;
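The instance method `human.warmup()` shown earlier now delegates to this new module. A hedged sketch of typical startup use; the performance key name is an assumption:

```ts
// hedged sketch: pre-run configured models once at startup
import Human from '@vladmandic/human';

const human = new Human({ warmup: 'face' });  // warmup mode is read from config
human.warmup().then((res) => {
  if (res && 'error' in res) console.log('warmup failed:', res.error);
  else console.log('warmup time:', human.performance.warmup); // assumption: recorded under this key
});
```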