add dynamic kernel op detection

pull/356/head
Vladimir Mandic 2021-09-12 13:17:33 -04:00
parent 7fa09937b4
commit 203dbffa1a
8 changed files with 21 additions and 13 deletions

View File

@@ -11,6 +11,7 @@
### **HEAD -> main** 2021/09/12 mandic00@live.com
- minor typos
- release candidate
- parametrize face config
- mark all config items as optional

View File

@@ -121,7 +121,7 @@ export async function get() {
if (env.webgpu.supported) env.webgpu.adapter = (await navigator['gpu'].requestAdapter())?.name;
// enumerate kernels
env.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName);
env.kernels = tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
// get cpu info
// cpuinfo();
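Note: a minimal standalone sketch of the kernel enumeration added above, assuming @tensorflow/tfjs is available; the listKernels helper name is illustrative and not part of the commit.

import * as tf from '@tensorflow/tfjs';

async function listKernels(): Promise<string[]> {
  await tf.ready();
  // getKernelsForBackend returns the kernel registry entries for the active backend;
  // names are lower-cased so later lookups can be case-insensitive
  return tf.getKernelsForBackend(tf.getBackend()).map((kernel) => kernel.kernelName.toLowerCase());
}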

View File

@@ -3,6 +3,7 @@ import * as box from './box';
import * as util from './util';
import * as detector from './handdetector';
import { Tensor, GraphModel } from '../tfjs/types';
import { env } from '../env';
const palmBoxEnlargeFactor = 5; // default 3
const handBoxEnlargeFactor = 1.65; // default 1.65
@@ -109,7 +110,7 @@
const angle = config.hand.rotation ? util.computeRotation(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
const palmCenter = box.getBoxCenter(currentBox);
const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];
const rotatedImage = config.hand.rotation && tf.ENV.flags.IS_BROWSER ? tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized) : image.clone();
const rotatedImage = config.hand.rotation && env.kernels.includes('rotatewithoffset') ? tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized) : image.clone();
const rotationMatrix = util.buildRotationMatrix(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = box.cutBoxFromImageAndResize(newBox, rotatedImage, [this.inputSize, this.inputSize]);
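Note: the change above swaps a blanket browser check for a per-kernel check. A hedged sketch of the pattern, assuming env.kernels was populated by env.get(); rotateIfSupported is an illustrative helper, not part of the commit.

import * as tf from '@tensorflow/tfjs';
import { env } from '../env';

function rotateIfSupported(image: tf.Tensor4D, angle: number, center: [number, number]): tf.Tensor4D {
  // fall back to a plain clone when the active backend does not register the rotation kernel
  if (!env.kernels.includes('rotatewithoffset')) return image.clone();
  return tf.image.rotateWithOffset(image, angle, 0, center);
}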

View File

@@ -226,7 +226,7 @@
#sanity = (input: Input): null | string => {
if (!this.#checkSanity) return null;
if (!input) return 'input is not defined';
if (this.tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) return 'input must be a tensor';
if (this.env.node && !(input instanceof tf.Tensor)) return 'input must be a tensor';
try {
this.tf.getBackend();
} catch {
@@ -293,10 +293,10 @@ export class Human {
if (this.#firstRun) { // print version info on first run and check for correct backend setup
if (this.config.debug) log(`version: ${this.version}`);
if (this.config.debug) log(`tfjs version: ${this.tf.version_core}`);
if (this.config.debug) log('environment:', env.env);
// if (this.config.debug) log('environment:', this.env);
await this.#checkBackend();
if (this.tf.ENV.flags.IS_BROWSER) {
if (this.env.browser) {
if (this.config.debug) log('configuration:', this.config);
if (this.config.debug) log('tf flags:', this.tf.ENV.flags);
}
@@ -322,7 +322,7 @@
// check if backend needs initialization if it changed
/** @hidden */
#checkBackend = async () => {
if (this.#firstRun || (this.config.backend && (this.config.backend.length > 0) || (this.tf.getBackend() !== this.config.backend))) {
if (this.#firstRun || (this.config.backend && (this.config.backend.length > 0) && (this.tf.getBackend() !== this.config.backend))) {
const timeStamp = now();
this.state = 'backend';
/* force backend reload
@@ -343,17 +343,17 @@
}
// force browser vs node backend
if (this.tf.ENV.flags.IS_BROWSER && this.config.backend === 'tensorflow') {
if (this.env.browser && this.config.backend === 'tensorflow') {
log('override: backend set to tensorflow while running in browser');
this.config.backend = 'humangl';
}
if (this.tf.ENV.flags.IS_NODE && (this.config.backend === 'webgl' || this.config.backend === 'humangl')) {
if (this.env.node && (this.config.backend === 'webgl' || this.config.backend === 'humangl')) {
log(`override: backend set to ${this.config.backend} while running in nodejs`);
this.config.backend = 'tensorflow';
}
// handle webgpu
if (this.tf.ENV.flags.IS_BROWSER && this.config.backend === 'webgpu') {
if (this.env.browser && this.config.backend === 'webgpu') {
if (typeof navigator === 'undefined' || typeof navigator['gpu'] === 'undefined') {
log('override: backend set to webgpu but browser does not support webgpu');
this.config.backend = 'humangl';
@@ -370,8 +370,8 @@
if (!available.includes(this.config.backend)) {
log(`error: backend ${this.config.backend} not found in registry`);
this.config.backend = this.tf.ENV.flags.IS_NODE ? 'tensorflow' : 'humangl';
log(`override: using backend ${this.config.backend} instead`);
this.config.backend = this.env.node ? 'tensorflow' : 'humangl';
log(`override: setting backend ${this.config.backend}`);
}
if (this.config.debug) log('setting backend:', this.config.backend);
@@ -415,6 +415,7 @@
this.tf.enableProdMode();
await this.tf.ready();
this.performance.backend = Math.trunc(now() - timeStamp);
this.config.backend = this.tf.getBackend();
env.get(); // update env on backend init
this.env = env.env;
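Note: browser/node detection now goes through the library's own env object instead of tf.ENV.flags, and env.get() re-runs after tf.ready() so the kernel list reflects the backend that actually initialized. A hedged sketch of the override rules shown above; resolveBackend is an illustrative name, not the commit's API.

import { env } from '../env';

function resolveBackend(requested: string): string {
  // mirrors the override log messages above: tensorflow backend only in nodejs, GL backends only in browser
  if (env.browser && requested === 'tensorflow') return 'humangl';
  if (env.node && (requested === 'webgl' || requested === 'humangl')) return 'tensorflow';
  // webgpu additionally requires navigator.gpu support
  if (env.browser && requested === 'webgpu' && (typeof navigator === 'undefined' || typeof navigator['gpu'] === 'undefined')) return 'humangl';
  return requested;
}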

View File

@@ -6,6 +6,7 @@ import * as tf from '../../dist/tfjs.esm.js';
import * as fxImage from './imagefx';
import { Tensor } from '../tfjs/types';
import { Config } from '../config';
import { env } from '../env';
type Input = Tensor | typeof Image | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
@@ -90,7 +91,7 @@ export function process(input: Input, config: Config): { tensor: Tensor | null,
if (outCanvas?.width !== inCanvas?.width) outCanvas.width = inCanvas?.width;
if (outCanvas?.height !== inCanvas?.height) outCanvas.height = inCanvas?.height;
// log('created FX filter');
fx = tf.ENV.flags.IS_BROWSER ? new fxImage.GLImageFilter({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
fx = env.browser ? new fxImage.GLImageFilter({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
}
if (!fx) return { tensor: null, canvas: inCanvas };
fx.reset();
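Note: the GL-based image filter is now gated on env.browser rather than tf.ENV.flags.IS_BROWSER. A rough sketch of the lazy, environment-gated construction, assuming the repo's imagefx module; getFilter is an illustrative helper.

import * as fxImage from './imagefx';
import { env } from '../env';

let fx: fxImage.GLImageFilter | null = null;
function getFilter(canvas: HTMLCanvasElement | OffscreenCanvas): fxImage.GLImageFilter | null {
  // WebGL filters are only meaningful in a browser context; elsewhere processing skips the FX stage
  if (!fx && env.browser) fx = new fxImage.GLImageFilter({ canvas });
  return fx;
}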

View File

@@ -8,6 +8,7 @@ import { labels } from './labels';
import { ObjectResult } from '../result';
import { GraphModel, Tensor } from '../tfjs/types';
import { Config } from '../config';
import { env } from '../env';
let model;
let last: ObjectResult[] = [];
@@ -76,6 +77,7 @@ export async function predict(input: Tensor, config: Config): Promise<ObjectResu
return last;
}
skipped = 0;
if (!env.kernels.includes('mod') || !env.kernels.includes('sparsetodense')) return last;
return new Promise(async (resolve) => {
const outputSize = [input.shape[2], input.shape[1]];
const resize = tf.image.resizeBilinear(input, [model.inputSize, model.inputSize]);
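Note: the new early return makes the detector a no-op, returning the previous result, when the active backend does not register the ops this pipeline relies on. A hedged sketch of the check; objectDetectionSupported is an illustrative name.

import { env } from '../env';

function objectDetectionSupported(): boolean {
  // 'mod' and 'sparsetodense' are the lower-cased kernel names checked before running the pipeline
  return env.kernels.includes('mod') && env.kernels.includes('sparsetodense');
}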

View File

@@ -8,6 +8,7 @@ import { labels } from './labels';
import { ObjectResult } from '../result';
import { GraphModel, Tensor } from '../tfjs/types';
import { Config } from '../config';
import { env } from '../env';
let model;
let last: Array<ObjectResult> = [];
@@ -108,6 +109,7 @@ export async function predict(image: Tensor, config: Config): Promise<ObjectResu
return last;
}
skipped = 0;
if (!env.kernels.includes('mod') || !env.kernels.includes('sparsetodense')) return last;
return new Promise(async (resolve) => {
const outputSize = [image.shape[2], image.shape[1]];
const resize = tf.image.resizeBilinear(image, [model.inputSize, model.inputSize], false);
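Note: the same kernel guard appears in both object-detection modules, so when 'mod' or 'sparsetodense' is missing from env.kernels the predictor returns the last cached result rather than attempting to run ops the active backend cannot execute.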

wiki

@@ -1 +1 @@
Subproject commit ee12bda3113d3d893c898a9827f9c174d4058fb8
Subproject commit 0e902fcb57bdf9b65ed5e7ef281a699e95db6d99