mirror of https://github.com/vladmandic/human
enable body segmentation and background replacement
parent: 5c6ba688c9
commit: 302cc31f59

@@ -11,6 +11,7 @@ Repository: **<git+https://github.com/vladmandic/human.git>**

 ### **HEAD -> main** 2021/06/04 mandic00@live.com

+- added experimental body segmentation module
 - add meet and selfie models
 - add live hints to demo
 - switch worker from module to iife importscripts

@@ -65,6 +65,7 @@
 .icon { width: 180px; text-align: -webkit-center; text-align: -moz-center; filter: grayscale(1); }
 .icon:hover { background: #505050; filter: grayscale(0); }
 .hint { opacity: 0; transition-duration: 0.5s; transition-property: opacity; font-style: italic; position: fixed; top: 5rem; padding: 8px; margin: 8px; box-shadow: 0 0 2px 2px #303030; }
+.input-file { align-self: center; width: 5rem; }
 </style>
 </head>
 <body>

@@ -5,12 +5,16 @@
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
+import { Config } from '../config';
+import { GraphModel, Tensor } from '../tfjs/types';

-let model;
+let model: GraphModel;
 let last = { age: 0 };
 let skipped = Number.MAX_SAFE_INTEGER;

-export async function load(config) {
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export async function load(config: Config | any) {
   if (!model) {
     model = await tf.loadGraphModel(join(config.modelBasePath, config.face.age.modelPath));
     if (!model || !model.modelUrl) log('load model failed:', config.face.age.modelPath);

@@ -19,7 +23,8 @@ export async function load(config) {
   return model;
 }

-export async function predict(image, config) {
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export async function predict(image: Tensor, config: Config | any) {
   if (!model) return null;
   if ((skipped < config.face.age.skipFrames) && config.skipFrame && last.age && (last.age > 0)) {
     skipped++;

@@ -353,6 +353,7 @@ const config: Config = {
   segmentation: {
     enabled: false, // if segmentation is enabled, output result.canvas will be augmented
     // with masked image containing only person output
+    // segmentation is not triggered as part of detection and requires separate call to human.segmentation
     modelPath: 'selfie.json', // experimental: object detection model, can be absolute path or relative to modelBasePath
     // can be 'selfie' or 'meet'
   },
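
Because the default added here leaves segmentation disabled and it is no longer run as part of detection, enabling it is an explicit opt-in through the user config. A minimal override sketch; the model base path is illustrative and not part of this diff:

```ts
// partial user config, merged over the defaults shown in the hunk above
const userConfig = {
  modelBasePath: '../models/', // hypothetical location of the model files
  segmentation: {
    enabled: true,           // the default added in this commit is false
    modelPath: 'meet.json',  // or keep the default 'selfie.json'
  },
};
```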

@@ -5,8 +5,10 @@
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
+import { Config } from '../config';
+import { GraphModel, Tensor } from '../tfjs/types';

-let model;
+let model: GraphModel;
 let last = { gender: '' };
 let skipped = Number.MAX_SAFE_INTEGER;
 let alternative = false;

@@ -14,7 +16,8 @@ let alternative = false;
 // tuning values
 const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale

-export async function load(config) {
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export async function load(config: Config | any) {
   if (!model) {
     model = await tf.loadGraphModel(join(config.modelBasePath, config.face.gender.modelPath));
     alternative = model.inputs[0].shape[3] === 1;

@@ -24,7 +27,8 @@ export async function load(config) {
   return model;
 }

-export async function predict(image, config) {
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export async function predict(image: Tensor, config: Config | any) {
   if (!model) return null;
   if ((skipped < config.face.gender.skipFrames) && config.skipFrame && last.gender !== '') {
     skipped++;

src/human.ts (30 lines changed)

@@ -31,16 +31,16 @@ import { Tensor } from './tfjs/types';

 // export types
 export type { Config } from './config';
-export type { Result, Face, Hand, Body, Item, Gesture } from './result';
+export type { Result, Face, Hand, Body, Item, Gesture, Person } from './result';
 export type { DrawOptions } from './draw/draw';

 /** Defines all possible input types for **Human** detection
- * @typedef Input
+ * @typedef Input Type
  */
 export type Input = Tensor | typeof Image | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;

 /** Error message
- * @typedef Error
+ * @typedef Error Type
  */
 export type Error = { error: string };

@@ -205,6 +205,7 @@ export class Human {

   /** Simmilarity method calculates simmilarity between two provided face descriptors (face embeddings)
    * - Calculation is based on normalized Minkowski distance between
+   *
    * @param embedding1: face descriptor as array of numbers
    * @param embedding2: face descriptor as array of numbers
    * @returns similarity: number

@@ -214,6 +215,19 @@ export class Human {
     return faceres.similarity(embedding1, embedding2);
   }

+  /**
+   * Segmentation method takes any input and returns processed canvas with body segmentation
+   * Optional parameter background is used to fill the background with specific input
+   * Segmentation is not triggered as part of detect process
+   *
+   * @param input: {@link Input}
+   * @param background?: {@link Input}
+   * @returns Canvas
+   */
+  segmentation(input: Input, background?: Input) {
+    return segmentation.process(input, background, this.config);
+  }
+
   /** Enhance method performs additional enhacements to face image previously detected for futher processing
    * @param input: Tensor as provided in human.result.face[n].tensor
    * @returns Tensor
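
Since the segmentation call inside `detect()` is commented out further down in this diff, this new method is the only entry point for background replacement. A rough usage sketch assuming browser usage; the package import, element ids, and the compositing step are illustrative rather than taken from this commit:

```ts
import Human from '@vladmandic/human'; // assumed package entry point

const human = new Human({ segmentation: { enabled: true } });

async function replaceBackground(video: HTMLVideoElement, background: HTMLImageElement) {
  // returns a canvas containing only the detected person, optionally composited over `background`
  const processed = await human.segmentation(video, background);
  if (!processed) return;
  const output = document.getElementById('output') as HTMLCanvasElement; // hypothetical target canvas
  output.getContext('2d')?.drawImage(processed as HTMLCanvasElement, 0, 0);
}
```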

@@ -372,7 +386,8 @@ export class Human {
   /**
    * Runs interpolation using last known result and returns smoothened result
    * Interpolation is based on time since last known result so can be called independently
-   * @param result?: use specific result set to run interpolation on
+   *
+   * @param result?: {@link Result} optional use specific result set to run interpolation on
    * @returns result: {@link Result}
    */
   next = (result?: Result) => interpolate.calc(result || this.result) as Result;

@@ -410,9 +425,10 @@ export class Human {
    * - Pre-process input: {@link Input}
    * - Run inference for all configured models
    * - Process and return result: {@link Result}
+   *
    * @param input: Input
-   * @param userConfig?: Config
-   * @returns result: Result
+   * @param userConfig?: {@link Config}
+   * @returns result: {@link Result}
    */
   async detect(input: Input, userConfig?: Config | Record<string, unknown>): Promise<Result | Error> {
     // detection happens inside a promise
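
For reference, a short sketch of the documented `detect()` contract; the import, input element, and logging are illustrative, only the types come from this file:

```ts
import Human from '@vladmandic/human'; // assumed package entry point

const human = new Human();
const video = document.getElementById('video') as HTMLVideoElement; // hypothetical input element

async function run() {
  const result = await human.detect(video); // Promise<Result | Error>
  if ('error' in result) {
    console.error(result.error);            // Error = { error: string }
  } else {
    console.log(result.face.length, 'faces detected');
  }
}
```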

@@ -558,6 +574,7 @@ export class Human {
       }

       // run segmentation
+      /* not triggered as part of detect
       if (this.config.segmentation.enabled) {
         this.analyze('Start Segmentation:');
         this.state = 'run:segmentation';

@@ -567,6 +584,7 @@ export class Human {
         if (elapsedTime > 0) this.performance.segmentation = elapsedTime;
         this.analyze('End Segmentation:');
       }
+      */

       this.performance.total = Math.trunc(now() - timeStart);
       this.state = 'idle';

@@ -5,6 +5,9 @@
 import * as tf from '../../dist/tfjs.esm.js';
 import * as fxImage from './imagefx';
 import { Tensor } from '../tfjs/types';
+import { Config } from '../config';
+
+type Input = Tensor | typeof Image | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;

 const maxSize = 2048;
 // internal temp canvases

@@ -16,7 +19,7 @@ let fx;
 // process input image and return tensor
 // input can be tensor, imagedata, htmlimageelement, htmlvideoelement
 // input is resized and run through imagefx filter
-export function process(input, config): { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement } {
+export function process(input: Input, config: Config): { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement } {
   let tensor;
   if (!input) throw new Error('Human: Input is missing');
   // sanity checks since different browsers do not implement all dom elements

@@ -124,6 +124,7 @@ export interface Item {
 }

 /** Gesture results
+ * @typedef Gesture Type
  *
  * Array of individual results with one object per detected gesture
  * Each result has:

@@ -137,6 +138,7 @@ export type Gesture =
   | { 'hand': number, gesture: string }

 /** Person getter
+ * @interface Person Interface
  *
  * Each result has:
  * - id: person id

@@ -1,5 +1,6 @@
 {
   "compilerOptions": {
+    "noEmitOnError": false,
     "module": "es2020",
     "target": "es2018",
     "moduleResolution": "node",

@@ -18,7 +19,14 @@
     "skipLibCheck": true,
     "sourceMap": false,
     "strictNullChecks": true,
-    "allowJs": true
+    "allowJs": true,
+    "baseUrl": "./",
+    "paths": {
+      "tslib": ["node_modules/tslib/tslib.d.ts"],
+      "@tensorflow/tfjs-node/dist/io/file_system": ["node_modules/@tensorflow/tfjs-node/dist/io/file_system.js"],
+      "@tensorflow/tfjs-core/dist/index": ["node_modules/@tensorflow/tfjs-core/dist/index.js"],
+      "@tensorflow/tfjs-converter/dist/index": ["node_modules/@tensorflow/tfjs-converter/dist/index.js"]
+    }
   },
   "formatCodeOptions": { "indentSize": 2, "tabSize": 2 },
   "include": ["src/*", "src/***/*"],
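
The added `baseUrl`/`paths` block tells the TypeScript compiler which concrete files to use for these deep TFJS specifiers, so type-checking does not fail on unresolved modules. Roughly, an import like the following now resolves through the mapping; the import site itself is illustrative, not part of this diff:

```ts
// resolved via the "paths" mapping above to node_modules/@tensorflow/tfjs-node/dist/io/file_system.js
import * as fileSystem from '@tensorflow/tfjs-node/dist/io/file_system';

console.log(Object.keys(fileSystem)); // inspect what the module exposes
```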

@@ -35,6 +43,6 @@
   "entryPoints": "src/human.ts",
   "logLevel": "Info",
   "logger": "none",
-  "theme": "wiki/theme/",
+  "theme": "wiki/theme/"
  }
 }
|
|
Loading…
Reference in New Issue