modularize build platform

pull/356/head
Vladimir Mandic 2021-06-05 17:51:46 -04:00
parent d3bea52d51
commit 525634ad26
12 changed files with 59 additions and 46 deletions

View File

@ -11,9 +11,7 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
### **HEAD -> main** 2021/06/05 mandic00@live.com ### **HEAD -> main** 2021/06/05 mandic00@live.com
- minor git corruption
### **origin/main** 2021/06/05 mandic00@live.com
- unified build - unified build
- enable body segmentation and background replacement - enable body segmentation and background replacement
- work on body segmentation - work on body segmentation

View File

@ -27,9 +27,8 @@ const userConfig = {
hand: { enabled: false }, hand: { enabled: false },
gesture: { enabled: false }, gesture: { enabled: false },
body: { enabled: false }, body: { enabled: false },
filter: { filter: { enabled: true },
enabled: false, segmentation: { enabled: false },
},
}; };
const human = new Human(userConfig); // new instance of human const human = new Human(userConfig); // new instance of human

View File

@ -31,6 +31,7 @@ let userConfig = {
warmup: 'none', warmup: 'none',
backend: 'humangl', backend: 'humangl',
wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.6.0/dist/', wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.6.0/dist/',
segmentation: { enabled: true },
/* /*
async: false, async: false,
cacheSensitivity: 0, cacheSensitivity: 0,
@ -210,10 +211,9 @@ async function drawResults(input) {
// draw fps chart // draw fps chart
await menu.process.updateChart('FPS', ui.detectFPS); await menu.process.updateChart('FPS', ui.detectFPS);
// get updated canvas if missing or if we want buffering, but skip if segmentation is enabled if (userConfig.segmentation.enabled && ui.buffered) { // refresh segmentation if using buffered output
if (userConfig.segmentation.enabled) {
result.canvas = await human.segmentation(input, ui.background, userConfig); result.canvas = await human.segmentation(input, ui.background, userConfig);
} else if (!result.canvas || ui.buffered) { } else if (!result.canvas || ui.buffered) { // refresh with input if using buffered output or if missing canvas
const image = await human.image(input); const image = await human.image(input);
result.canvas = image.canvas; result.canvas = image.canvas;
human.tf.dispose(image.tensor); human.tf.dispose(image.tensor);

View File

@ -198,7 +198,10 @@ export interface Config {
}, },
/** Controls and configures the body segmentation module * removes background from input containing person
* if segmentation is enabled, output result.canvas will be augmented with masked image containing only person output * removes background from input containing person
* if segmentation is enabled it will run as preprocessing task before any other model
* alternatively leave it disabled and use it on-demand using human.segmentation method which can
* remove background or replace it with user-provided background
* *
* - enabled: true/false * - enabled: true/false
* - modelPath: object detection model, can be absolute path or relative to modelBasePath * - modelPath: object detection model, can be absolute path or relative to modelBasePath
@ -351,9 +354,11 @@ const config: Config = {
}, },
segmentation: { segmentation: {
enabled: false, // if segmentation is enabled, output result.canvas will be augmented enabled: false, // controls and configures the body segmentation module
// with masked image containing only person output // removes background from input containing person
// segmentation is not triggered as part of detection and requires separate call to human.segmentation // if segmentation is enabled it will run as preprocessing task before any other model
// alternatively leave it disabled and use it on-demand using human.segmentation method which can
// remove background or replace it with user-provided background
modelPath: 'selfie.json', // experimental: object detection model, can be absolute path or relative to modelBasePath modelPath: 'selfie.json', // experimental: object detection model, can be absolute path or relative to modelBasePath
// can be 'selfie' or 'meet' // can be 'selfie' or 'meet'
}, },

View File

@ -39,7 +39,7 @@ export function similarity(embedding1: Array<number>, embedding2: Array<number>,
if (embedding1?.length !== embedding2?.length) return 0; if (embedding1?.length !== embedding2?.length) return 0;
// general minkowski distance, euclidean distance is limited case where order is 2 // general minkowski distance, euclidean distance is limited case where order is 2
const distance = 5.0 * embedding1 const distance = 5.0 * embedding1
.map((val, i) => (Math.abs(embedding1[i] - embedding2[i]) ** order)) // distance squared .map((_val, i) => (Math.abs(embedding1[i] - embedding2[i]) ** order)) // distance squared
.reduce((sum, now) => (sum + now), 0) // sum all distances .reduce((sum, now) => (sum + now), 0) // sum all distances
** (1 / order); // get root of ** (1 / order); // get root of
const res = Math.max(0, 100 - distance) / 100.0; const res = Math.max(0, 100 - distance) / 100.0;

View File

@ -435,6 +435,7 @@ export class Human {
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
this.state = 'config'; this.state = 'config';
let timeStamp; let timeStamp;
let elapsedTime;
// update configuration // update configuration
this.config = mergeDeep(this.config, userConfig) as Config; this.config = mergeDeep(this.config, userConfig) as Config;
@ -473,14 +474,31 @@ export class Human {
*/ */
timeStamp = now(); timeStamp = now();
const process = image.process(input, this.config); let process = image.process(input, this.config);
this.performance.image = Math.trunc(now() - timeStamp);
this.analyze('Get Image:');
// run segmentation preprocessing
if (this.config.segmentation.enabled && process && process.tensor) {
this.analyze('Start Segmentation:');
this.state = 'run:segmentation';
timeStamp = now();
await segmentation.predict(process);
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.performance.segmentation = elapsedTime;
if (process.canvas) {
// replace input
process.tensor.dispose();
process = image.process(process.canvas, this.config);
}
this.analyze('End Segmentation:');
}
if (!process || !process.tensor) { if (!process || !process.tensor) {
log('could not convert input to tensor'); log('could not convert input to tensor');
resolve({ error: 'could not convert input to tensor' }); resolve({ error: 'could not convert input to tensor' });
return; return;
} }
this.performance.image = Math.trunc(now() - timeStamp);
this.analyze('Get Image:');
timeStamp = now(); timeStamp = now();
this.config.skipFrame = await this.#skipFrame(process.tensor); this.config.skipFrame = await this.#skipFrame(process.tensor);
@ -497,7 +515,6 @@ export class Human {
let bodyRes; let bodyRes;
let handRes; let handRes;
let objectRes; let objectRes;
let elapsedTime;
// run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
if (this.config.async) { if (this.config.async) {
@ -573,19 +590,6 @@ export class Human {
else if (this.performance.gesture) delete this.performance.gesture; else if (this.performance.gesture) delete this.performance.gesture;
} }
// run segmentation
/* not triggered as part of detect
if (this.config.segmentation.enabled) {
this.analyze('Start Segmentation:');
this.state = 'run:segmentation';
timeStamp = now();
await segmentation.predict(process, this.config);
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.performance.segmentation = elapsedTime;
this.analyze('End Segmentation:');
}
*/
this.performance.total = Math.trunc(now() - timeStart); this.performance.total = Math.trunc(now() - timeStart);
this.state = 'idle'; this.state = 'idle';
this.result = { this.result = {

View File

@ -96,7 +96,7 @@ async function process(res, inputSize, outputShape, config) {
// filter & sort results // filter & sort results
results = results results = results
.filter((a, idx) => nmsIdx.includes(idx)) .filter((_val, idx) => nmsIdx.includes(idx))
.sort((a, b) => (b.score - a.score)); .sort((a, b) => (b.score - a.score));
return results; return results;

View File

@ -6,9 +6,6 @@ const config = {
backend: 'tensorflow', backend: 'tensorflow',
debug: false, debug: false,
async: false, async: false,
filter: {
enabled: true,
},
face: { face: {
enabled: true, enabled: true,
detector: { enabled: true, rotation: true }, detector: { enabled: true, rotation: true },
@ -20,6 +17,8 @@ const config = {
hand: { enabled: true }, hand: { enabled: true },
body: { enabled: true }, body: { enabled: true },
object: { enabled: true }, object: { enabled: true },
segmentation: { enabled: true },
filter: { enabled: false },
}; };
test(Human, config); test(Human, config);

View File

@ -8,9 +8,6 @@ const config = {
// wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.6.0/dist/', // wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.6.0/dist/',
debug: false, debug: false,
async: false, async: false,
filter: {
enabled: true,
},
face: { face: {
enabled: true, enabled: true,
detector: { enabled: true, rotation: true }, detector: { enabled: true, rotation: true },
@ -22,6 +19,8 @@ const config = {
hand: { enabled: true }, hand: { enabled: true },
body: { enabled: true }, body: { enabled: true },
object: { enabled: false }, object: { enabled: false },
segmentation: { enabled: true },
filter: { enabled: false },
}; };
test(Human, config); test(Human, config);

View File

@ -6,9 +6,6 @@ const config = {
backend: 'tensorflow', backend: 'tensorflow',
debug: false, debug: false,
async: false, async: false,
filter: {
enabled: true,
},
face: { face: {
enabled: true, enabled: true,
detector: { enabled: true, rotation: true }, detector: { enabled: true, rotation: true },
@ -20,6 +17,8 @@ const config = {
hand: { enabled: true }, hand: { enabled: true },
body: { enabled: true }, body: { enabled: true },
object: { enabled: true }, object: { enabled: true },
segmentation: { enabled: true },
filter: { enabled: false },
}; };
test(Human, config); test(Human, config);

View File

@ -7,17 +7,16 @@
"typeRoots": ["node_modules/@types"], "typeRoots": ["node_modules/@types"],
"outDir": "types", "outDir": "types",
"declaration": true, "declaration": true,
"allowSyntheticDefaultImports": true,
"emitDeclarationOnly": true, "emitDeclarationOnly": true,
"emitDecoratorMetadata": true, "emitDecoratorMetadata": true,
"experimentalDecorators": true, "experimentalDecorators": true,
"importHelpers": true, "importHelpers": true,
"noImplicitAny": false,
"preserveConstEnums": true, "preserveConstEnums": true,
"removeComments": false, "removeComments": false,
"resolveJsonModule": true, "resolveJsonModule": true,
"skipLibCheck": true, "skipLibCheck": true,
"sourceMap": false, "sourceMap": true,
"strictNullChecks": true,
"allowJs": true, "allowJs": true,
"baseUrl": "./", "baseUrl": "./",
"paths": { "paths": {
@ -25,10 +24,21 @@
"@tensorflow/tfjs-node/dist/io/file_system": ["node_modules/@tensorflow/tfjs-node/dist/io/file_system.js"], "@tensorflow/tfjs-node/dist/io/file_system": ["node_modules/@tensorflow/tfjs-node/dist/io/file_system.js"],
"@tensorflow/tfjs-core/dist/index": ["node_modules/@tensorflow/tfjs-core/dist/index.js"], "@tensorflow/tfjs-core/dist/index": ["node_modules/@tensorflow/tfjs-core/dist/index.js"],
"@tensorflow/tfjs-converter/dist/index": ["node_modules/@tensorflow/tfjs-converter/dist/index.js"] "@tensorflow/tfjs-converter/dist/index": ["node_modules/@tensorflow/tfjs-converter/dist/index.js"]
} },
"strictNullChecks": true,
"noImplicitAny": false,
"noUnusedLocals": false,
"noImplicitReturns": true,
"noImplicitThis": true,
"alwaysStrict": true,
"noUnusedParameters": true,
"pretty": true,
"noFallthroughCasesInSwitch": true,
"allowUnreachableCode": false
}, },
"formatCodeOptions": { "indentSize": 2, "tabSize": 2 }, "formatCodeOptions": { "indentSize": 2, "tabSize": 2 },
"include": ["src/*", "src/***/*"], "include": ["src/*", "src/***/*"],
"exclude": ["node_modules/"],
"typedocOptions": { "typedocOptions": {
"excludePrivate": true, "excludePrivate": true,
"excludeExternals": true, "excludeExternals": true,

2
wiki

@ -1 +1 @@
Subproject commit c9408224d824368facc264c00e05d7b520d69051 Subproject commit 9e92e5eec1e60b5ea58dbf1c4bbc67c828bcf673