complete async work

pull/356/head
Vladimir Mandic 2021-08-14 11:16:26 -04:00
parent 451e88e1bf
commit eadc65cc5a
7 changed files with 47 additions and 36 deletions

View File

@@ -41,9 +41,9 @@ let userConfig = {
flip: false,
},
face: { enabled: true,
detector: { return: false },
detector: { return: false, rotation: true },
mesh: { enabled: true },
iris: { enabled: false },
iris: { enabled: true },
description: { enabled: false },
emotion: { enabled: false },
},
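The demo defaults now enable detector rotation correction and iris analysis. A minimal sketch of how a config object like this is typically consumed, assuming the published @vladmandic/human package and its Human constructor; the demo's actual wiring may differ:

// sketch only: pass the demo overrides to Human (package name and API assumed, not taken from this diff)
import { Human } from '@vladmandic/human';

async function run(videoElement: HTMLVideoElement) {
  const human = new Human(userConfig);             // user overrides are merged with library defaults
  const result = await human.detect(videoElement); // face detector, mesh and iris run per the config above
  console.log(result.face);
}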
@@ -441,9 +441,9 @@ function webWorker(input, image, canvas, timestamp) {
// main processing function when input is webcam, can use direct invocation or web worker
function runHumanDetect(input, canvas, timestamp) {
// if live video
const videoLive = (input.readyState > 2) && (!input.paused);
const cameraLive = input.srcObject && (input.srcObject.getVideoTracks()[0].readyState === 'live');
const live = videoLive || cameraLive;
const videoLive = input.readyState > 2;
const cameraLive = input.srcObject?.getVideoTracks()[0].readyState === 'live';
const live = (videoLive || cameraLive) && (!input.paused);
if (!live) {
// stop ui refresh
// if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
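The reordered check means a paused element now counts as not live even when its camera stream is still active, and the optional chaining guards against a missing srcObject. A standalone sketch of the same predicate (illustrative only, not the demo's code):

// sketch: liveness predicate factored into a helper, mirroring the new logic above
function isLive(input: HTMLVideoElement): boolean {
  const videoLive = input.readyState > 2; // HAVE_FUTURE_DATA or better
  const stream = input.srcObject instanceof MediaStream ? input.srcObject : null;
  const cameraLive = stream?.getVideoTracks()[0]?.readyState === 'live';
  return (videoLive || cameraLive) && !input.paused; // paused applies to both the file and camera cases
}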

View File

@@ -83,7 +83,7 @@
"node-fetch": "^2.6.1",
"rimraf": "^3.0.2",
"seedrandom": "^3.0.5",
"simple-git": "^2.43.0",
"simple-git": "^2.44.0",
"tslib": "^2.3.1",
"typedoc": "0.21.5",
"typescript": "4.3.5"

View File

@@ -34,14 +34,10 @@ export async function load(config: Config): Promise<GraphModel> {
function max2d(inputs, minScore) {
const [width, height] = inputs.shape;
return tf.tidy(() => {
// modulus op implemented in tf
const mod = (a, b) => tf.sub(a, tf.mul(tf.div(a, tf.scalar(b, 'int32')), tf.scalar(b, 'int32')));
// combine all data
const reshaped = tf.reshape(inputs, [height * width]);
// get highest score
const newScore = tf.max(reshaped, 0).dataSync()[0]; // inside tf.tidy
if (newScore > minScore) {
// skip coordinate calculation if score is too low
const mod = (a, b) => tf.sub(a, tf.mul(tf.div(a, tf.scalar(b, 'int32')), tf.scalar(b, 'int32'))); // modulus op implemented in tf
const reshaped = tf.reshape(inputs, [height * width]); // combine all data
const newScore = tf.max(reshaped, 0).dataSync()[0]; // get highest score // inside tf.tidy
if (newScore > minScore) { // skip coordinate calculation if score is too low
const coords = tf.argMax(reshaped, 0);
const x = mod(coords, width).dataSync()[0]; // inside tf.tidy
const y = tf.div(coords, tf.scalar(width, 'int32')).dataSync()[0]; // inside tf.tidy
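For context, this helper finds the peak of a 2D score map and recovers its coordinates; the integer modulus is emulated as a - (a / b) * b with int32 scalars. A reconstruction of the full helper as it reads after this change (the return values for the low-score branch are assumed, as they are not visible in this hunk):

// sketch: peak location in a 2D heatmap, reconstructed from the hunk above
function max2d(inputs: Tensor, minScore: number) {
  const [width, height] = inputs.shape;
  return tf.tidy(() => {
    const mod = (a, b) => tf.sub(a, tf.mul(tf.div(a, tf.scalar(b, 'int32')), tf.scalar(b, 'int32'))); // modulus op implemented in tf
    const reshaped = tf.reshape(inputs, [height * width]); // flatten to 1D so argMax gives a single index
    const newScore = tf.max(reshaped, 0).dataSync()[0]; // highest score; dataSync is safe inside tf.tidy
    if (newScore > minScore) { // skip coordinate calculation if score is too low
      const coords = tf.argMax(reshaped, 0); // flat index of the peak
      const x = mod(coords, width).dataSync()[0]; // column = index % width
      const y = tf.div(coords, tf.scalar(width, 'int32')).dataSync()[0]; // row = index / width (int division)
      return [x, y, newScore];
    }
    return [0, 0, newScore]; // assumed fallback
  });
}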

View File

@@ -133,23 +133,23 @@ export async function predict(image: Tensor, config: Config, idx, count) {
tf.dispose(enhanced);
if (resT) {
tf.tidy(() => {
const gender = resT.find((t) => t.shape[1] === 1).dataSync(); // inside tf.tidy
const confidence = Math.trunc(200 * Math.abs((gender[0] - 0.5))) / 100;
if (confidence > config.face.description.minConfidence) {
obj.gender = gender[0] <= 0.5 ? 'female' : 'male';
obj.genderScore = Math.min(0.99, confidence);
}
const age = tf.argMax(resT.find((t) => t.shape[1] === 100), 1).dataSync()[0]; // inside tf.tidy
const all = resT.find((t) => t.shape[1] === 100).dataSync(); // inside tf.tidy
obj.age = Math.round(all[age - 1] > all[age + 1] ? 10 * age - 100 * all[age - 1] : 10 * age + 100 * all[age + 1]) / 10;
const gender = await resT.find((t) => t.shape[1] === 1).data();
const confidence = Math.trunc(200 * Math.abs((gender[0] - 0.5))) / 100;
if (confidence > config.face.description.minConfidence) {
obj.gender = gender[0] <= 0.5 ? 'female' : 'male';
obj.genderScore = Math.min(0.99, confidence);
}
const argmax = tf.argMax(resT.find((t) => t.shape[1] === 100), 1);
const age = (await argmax.data())[0];
const all = await resT.find((t) => t.shape[1] === 100).data();
obj.age = Math.round(all[age - 1] > all[age + 1] ? 10 * age - 100 * all[age - 1] : 10 * age + 100 * all[age + 1]) / 10;
const desc = resT.find((t) => t.shape[1] === 1024);
// const reshape = desc.reshape([128, 8]); // reshape large 1024-element descriptor to 128 x 8
// const reduce = reshape.logSumExp(1); // reduce 2nd dimension by calculating logSumExp on it which leaves us with 128-element descriptor
const desc = resT.find((t) => t.shape[1] === 1024);
// const reshape = desc.reshape([128, 8]); // reshape large 1024-element descriptor to 128 x 8
// const reduce = reshape.logSumExp(1); // reduce 2nd dimension by calculating logSumExp on it which leaves us with 128-element descriptor
obj.descriptor = [...desc.dataSync()]; // inside tf.tidy
});
const descriptor = await desc.data();
obj.descriptor = [...descriptor];
resT.forEach((t) => tf.dispose(t));
}
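The key change in this file, and in this commit generally, is that synchronous dataSync() reads inside tf.tidy() are replaced with await tensor.data() plus explicit disposal, because tf.tidy() cannot track tensors created in an async callback. A minimal sketch of the resulting pattern (illustrative helper, not part of the library):

// sketch: async download plus explicit cleanup, the pattern used in place of dataSync inside tf.tidy
async function readAndDispose(t: Tensor): Promise<Float32Array | Int32Array | Uint8Array> {
  const values = await t.data(); // non-blocking GPU to CPU download
  tf.dispose(t);                 // manual disposal, since tf.tidy cannot wrap async code
  return values;
}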

View File

@@ -87,10 +87,10 @@ export async function predict(image: Tensor, config: Config | any) {
obj.confidence = Math.min(0.99, confidence);
}
/*
let age = genderT[1].argMax(1).dataSync()[0];
const all = genderT[1].dataSync();
let age = (await genderT[1].argMax(1).data())[0];
const all = await genderT[1].data();
age = Math.round(all[age - 1] > all[age + 1] ? 10 * age - 100 * all[age - 1] : 10 * age + 100 * all[age + 1]) / 10;
const descriptor = genderT[1].dataSync();
const descriptor = await genderT[1].data();
*/
genderT.forEach((t) => tf.dispose(t));
}

View File

@@ -118,7 +118,7 @@ export class HandPipeline {
tf.dispose(rotatedImage);
const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage) as Array<Tensor>;
tf.dispose(handImage);
const confidence = confidenceT.dataSync()[0];
const confidence = (await confidenceT.data())[0];
tf.dispose(confidenceT);
if (confidence >= config.hand.minConfidence) {
const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
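The same pattern applies here: the scalar confidence is downloaded with await data() and its tensor is disposed immediately. A short sketch of how the reshaped keypoints tensor might then be turned into plain [x, y, z] triplets (assumed post-processing, not shown in this hunk):

// sketch: convert the [-1, 3] keypoints tensor into an array of [x, y, z] points
const rawKeypoints = await keypointsReshaped.array() as number[][]; // one [x, y, z] entry per landmark
tf.dispose(keypointsReshaped); // downloaded, so the tensor can be released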

View File

@@ -302,8 +302,23 @@ export class Human {
if (typeof window === 'undefined' && typeof WorkerGlobalScope !== 'undefined' && this.config.debug) log('running inside web worker');
// force browser vs node backend
if (this.tf.ENV.flags.IS_BROWSER && this.config.backend === 'tensorflow') this.config.backend = 'webgl';
if (this.tf.ENV.flags.IS_NODE && (this.config.backend === 'webgl' || this.config.backend === 'humangl')) this.config.backend = 'tensorflow';
if (this.tf.ENV.flags.IS_BROWSER && this.config.backend === 'tensorflow') {
if (this.config.debug) log('override: backend set to tensorflow while running in browser');
this.config.backend = 'humangl';
}
if (this.tf.ENV.flags.IS_NODE && (this.config.backend === 'webgl' || this.config.backend === 'humangl')) {
if (this.config.debug) log('override: backend set to webgl while running in nodejs');
this.config.backend = 'tensorflow';
}
const available = Object.keys(this.tf.engine().registryFactory);
if (this.config.debug) log('available backends:', available);
if (!available.includes(this.config.backend)) {
log(`error: backend ${this.config.backend} not found in registry`);
this.config.backend = this.tf.ENV.flags.IS_NODE ? 'tensorflow' : 'humangl';
log(`override: using backend ${this.config.backend} instead`);
}
if (this.config.debug) log('setting backend:', this.config.backend);
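The new logic validates the configured backend against the tfjs backend registry before setting it, falling back to tensorflow under Node or humangl in the browser. A small sketch of the same registry check in isolation (backend names in the comment are only examples):

// sketch: list registered tfjs backends and verify the requested one exists
const registered = Object.keys(tf.engine().registryFactory); // e.g. ['cpu', 'webgl', 'wasm']
const requested = 'webgl'; // hypothetical value, normally read from config
if (!registered.includes(requested)) {
  console.log(`backend ${requested} not found in registry, falling back`);
}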
@@ -363,7 +378,7 @@ export class Human {
// use tensor sum
/*
const sumT = this.tf.sum(reduced);
const sum = sumT.dataSync()[0] as number;
const sum = (await sumT.data())[0] as number;
sumT.dispose();
*/
// use js loop sum, faster than uploading tensor to gpu calculating and downloading back
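The commented-out tensor sum is kept only as a reference; for a small reduced tensor it is cheaper to download the data once and sum it in a plain JS loop than to run a GPU reduction and then download the scalar. A sketch of that loop (variable names follow the comment above; the surrounding code is not shown in this hunk):

// sketch: single download followed by a JS loop sum, the approach the comment above favors
const reducedData = await reduced.data(); // one GPU to CPU transfer
let sum = 0;
for (let i = 0; i < reducedData.length; i++) sum += reducedData[i];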