mirror of https://github.com/vladmandic/human
add simple webcam and webrtc demo
parent 43a91ba5e0
commit 5f68153af7
```diff
@@ -9,8 +9,9 @@
 ## Changelog
 
-### **HEAD -> main** 2021/09/14 mandic00@live.com
+### **HEAD -> main** 2021/09/15 mandic00@live.com
 
+- reorganize tfjs bundle
 - experimental custom tfjs bundle - disabled
 - add platform and backend capabilities detection
 - enhanced automated tests
```
```diff
@@ -9,6 +9,22 @@ async function log(...msg) {
 }
 }
 
+/**
+ * helper implementation of webrtc
+ * performs:
+ * - discovery
+ * - handshake
+ * - connect to webrtc stream
+ * - assign webrtc stream to video element
+ *
+ * for development purposes i'm using a test webrtc server that reads an rtsp stream from a security camera:
+ * <https://github.com/vladmandic/stream-rtsp>
+ *
+ * @param {string} server
+ * @param {string} streamName
+ * @param {HTMLVideoElement} elementName
+ * @return {Promise}
+ */
 async function webRTC(server, streamName, elementName) {
   const suuid = streamName;
   log('client starting');
```
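The comment above describes the flow the demo helper implements: discover the stream, perform the SDP handshake, connect, and attach the resulting stream to a video element. A minimal sketch of that flow is shown below; the signaling endpoint path and base64 payload encoding are assumptions typical of an RTSP-to-WebRTC relay, not details taken from this commit.

```ts
// Sketch only: endpoint path and payload format are assumptions, not part of this diff.
async function webRTCSketch(server: string, streamName: string, video: HTMLVideoElement): Promise<void> {
  const pc = new RTCPeerConnection();
  pc.addTransceiver('video', { direction: 'recvonly' }); // receive-only video
  pc.ontrack = (evt) => { video.srcObject = evt.streams[0]; }; // assign incoming webrtc stream to the video element
  const offer = await pc.createOffer(); // handshake: create local sdp offer
  await pc.setLocalDescription(offer);
  // hypothetical signaling endpoint: post our offer, receive the server's sdp answer
  const res = await fetch(`${server}/stream/receiver/${streamName}`, { method: 'POST', body: btoa(pc.localDescription!.sdp) });
  const answer = await res.text();
  await pc.setRemoteDescription({ type: 'answer', sdp: atob(answer) });
  await new Promise((resolve) => { video.onloadeddata = () => resolve(null); }); // wait until frames are available
}
```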
```diff
@@ -904,7 +904,6 @@ async function pwaRegister() {
 }
 
 async function main() {
-  /*
   window.addEventListener('unhandledrejection', (evt) => {
     // eslint-disable-next-line no-console
     console.error(evt.reason || evt);
@@ -912,7 +911,6 @@ async function main() {
     status('exception error');
     evt.preventDefault();
   });
-  */
 
   log('demo starting ...');
```
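The two hunks above remove the `/* ... */` markers, so the global rejection handler is now active. In standalone form, this standard browser pattern for surfacing otherwise-silent promise rejections looks like:

```ts
// Mirrors the handler enabled above, as a standalone snippet.
window.addEventListener('unhandledrejection', (evt: PromiseRejectionEvent) => {
  console.error(evt.reason || evt); // log the underlying error
  evt.preventDefault(); // mark the rejection as handled so the browser does not also report it
});
```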
```diff
@@ -492,17 +492,16 @@ export async function person(inCanvas: HTMLCanvasElement, result: Array<PersonRe
   }
 }
 
-export async function canvas(inCanvas: HTMLCanvasElement, outCanvas: HTMLCanvasElement) {
-  if (!inCanvas || !outCanvas) return;
-  getCanvasContext(outCanvas);
-  const ctx = getCanvasContext(inCanvas);
-  ctx.drawImage(inCanvas, 0, 0);
+export async function canvas(input: HTMLCanvasElement | HTMLImageElement | HTMLMediaElement | HTMLVideoElement, output: HTMLCanvasElement) {
+  if (!input || !output) return;
+  const ctx = getCanvasContext(output);
+  ctx.drawImage(input, 0, 0);
 }
 
 export async function all(inCanvas: HTMLCanvasElement, result: Result, drawOptions?: DrawOptions) {
+  if (!result || !result.performance || !result || !inCanvas) return null;
   const timestamp = now();
   const localOptions = mergeDeep(options, drawOptions);
-  if (!result || !inCanvas) return null;
 
   const promise = Promise.all([
     face(inCanvas, result.face, localOptions),
```
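With the widened signature, `canvas()` copies any drawable source (video, image, or canvas) onto an output canvas, which `all()` can then annotate. A hedged usage sketch follows; the element handles and the `human.draw` access path are illustrative assumptions rather than code from this commit.

```ts
// Illustrative frame loop body; `human` is assumed to be an initialized Human instance.
async function drawFrame(human: any, video: HTMLVideoElement, output: HTMLCanvasElement) {
  const result = await human.detect(video); // run detection on the current video frame
  await human.draw.canvas(video, output);   // copy the frame onto the output canvas (new signature accepts any drawable input)
  await human.draw.all(output, result);     // overlay all detection results on top of the copied frame
}
```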
src/human.ts (12 changes)
```diff
@@ -150,7 +150,6 @@ export class Human {
   #analyzeMemoryLeaks: boolean;
   #checkSanity: boolean;
   initial: boolean;
-
   // definition end
 
   /**
@@ -256,7 +255,7 @@ export class Human {
   * @param background?: {@link Input}
   * @returns Canvas
   */
-  segmentation(input: Input, background?: Input) {
+  async segmentation(input: Input, background?: Input) {
     return input ? segmentation.process(input, background, this.config) : null;
   }
 
@@ -327,14 +326,19 @@ export class Human {
   * @param result?: {@link Result} optional use specific result set to run interpolation on
   * @returns result: {@link Result}
   */
-  next = (result?: Result) => interpolate.calc(result || this.result) as Result;
+  next(result: Result = this.result) {
+    return interpolate.calc(result) as Result;
+  }
 
  /** Warmup method pre-initializes all configured models for faster inference
   * - can take significant time on startup
   * - only used for `webgl` and `humangl` backends
   * @param userConfig?: {@link Config}
+  * @returns result: {@link Result}
   */
-  warmup = (userConfig?: Partial<Config>) => warmups.warmup(this, userConfig) as Promise<Result | { error }>
+  async warmup(userConfig?: Partial<Config>): Promise<Result | { error }> {
+    return warmups.warmup(this, userConfig) as Promise<Result | { error }>;
+  }
 
   /** Main detection method
   * - Analyze configuration: {@link Config}
```
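The class-property arrow functions become regular instance methods here, and `segmentation()`/`warmup()` are declared `async`, so their return types are explicit promises. A short usage sketch under those signatures; the import path and the `backend: 'humangl'` config value are assumptions, not part of this diff.

```ts
// Illustrative use of the reshaped API surface.
import { Human } from '@vladmandic/human'; // assumed package entry point

async function runOnce(video: HTMLVideoElement) {
  const human = new Human({ backend: 'humangl' }); // config value is illustrative
  await human.warmup();                            // async method returning Promise<Result | { error }>
  const result = await human.detect(video);        // main detection pass
  const smoothed = human.next(result);             // interpolates; defaults to the last result when called with no argument
  const mask = await human.segmentation(video);    // declared async, so the returned canvas is awaited
  return { smoothed, mask };
}
```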
```diff
@@ -7,6 +7,7 @@ import * as fxImage from './imagefx';
 import type { Tensor } from '../tfjs/types';
 import type { Config } from '../config';
 import { env } from '../env';
+import { log } from '../helpers';
 
 type Input = Tensor | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas | typeof Image | typeof env.Canvas;
 
@@ -62,9 +63,16 @@ export function process(input: Input, config: Config): { tensor: Tensor | null,
     else throw new Error(`Human: Input tensor shape must be [1, height, width, 3] and instead was ${(input as unknown as Tensor).shape}`);
   } else {
     // check if resizing will be needed
+    if (typeof input['readyState'] !== 'undefined' && input['readyState'] <= 2) {
+      log('input stream is not ready');
+      return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
+    }
     const originalWidth = input['naturalWidth'] || input['videoWidth'] || input['width'] || (input['shape'] && (input['shape'][1] > 0));
     const originalHeight = input['naturalHeight'] || input['videoHeight'] || input['height'] || (input['shape'] && (input['shape'][2] > 0));
-    if (!originalWidth || !originalHeight) return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
+    if (!originalWidth || !originalHeight) {
+      log('cannot determine input dimensions');
+      return { tensor: null, canvas: inCanvas }; // video may become temporarily unavailable due to onresize
+    }
     let targetWidth = originalWidth;
     let targetHeight = originalHeight;
     if (targetWidth > maxSize) {
@@ -153,7 +161,6 @@ export function process(input: Input, config: Config): { tensor: Tensor | null,
     outCanvas = inCanvas;
     if (fx) fx = null;
   }
-
   // create tensor from image if tensor is not already defined
   if (!tensor) {
     let pixels;
```
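The added guard skips processing while a media element is still buffering: for `HTMLMediaElement`, `readyState <= 2` (`HAVE_CURRENT_DATA`) means at most the current frame is decodable and nothing beyond it. A standalone version of that check, as a sketch:

```ts
// Standalone form of the guard added above: skip frames until the media
// element reports at least HAVE_FUTURE_DATA (readyState > 2).
function isStreamReady(input: unknown): boolean {
  const readyState = (input as HTMLMediaElement).readyState;
  if (typeof readyState === 'undefined') return true; // not a media element, nothing to wait for
  return readyState > 2; // 0..2 = no data / metadata only / current frame only
}
```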
wiki (2 changes)

```diff
@@ -1 +1 @@
-Subproject commit 44b1bf12ab5dbf4cedde34da123237b1cd02627b
+Subproject commit deadc5d4f7db0dca06bbe8bbb956e01287ef3920
```